diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5d7c42b..97a8cda 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -3,10 +3,11 @@ name: Unit Tests on: pull_request: paths: - - 'src/**' - - '.mocharc.js' + - 'packages/*/src/**' + - 'packages/*/package.json' + - 'packages/*/tsconfig.json' + - 'packages/*/.mocharc.js' - 'package.json' - - 'tsconfig.json' jobs: run_tests: @@ -15,10 +16,10 @@ jobs: steps: - name: Checkout the repo uses: actions/checkout@v4 - - name: Install dependencies - id: install + - name: Build all dependencies + id: build run: | - npm install + ./bin/build.sh - name: Run Tests id: test run: | diff --git a/Dockerfile b/Dockerfile index f11fe88..8b7e641 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,18 +5,16 @@ WORKDIR /build COPY . . -WORKDIR /build - RUN apt-get update && \ apt-get install -y \ --no-install-recommends \ - python3 \ - build-essential \ - ca-certificates && \ + python3 \ + build-essential \ + ca-certificates && \ useradd -u 10005 dwebapi && \ - tail -n 1 /etc/passwd >/etc/passwd.scratch && \ - npm install && \ - npm run build + tail -n 1 /etc/passwd >/etc/passwd.scratch + +RUN ./bin/build.sh FROM gcr.io/distroless/nodejs20-debian12 as runtime @@ -27,14 +25,15 @@ WORKDIR /app # Copy node_modules until working fix with npm run build. COPY --from=build --chown=10005:10005 /build/node_modules ./node_modules COPY --from=build --chown=10005:10005 /build/package.json . -COPY --from=build --chown=10005:10005 /build/tsconfig.json . -COPY --from=build --chown=10005:10005 /build/dist ./dist +COPY --from=build --chown=10005:10005 /build/packages ./packages COPY --from=build /etc/ssl /etc/ssl COPY --from=build /etc/passwd.scratch /etc/passwd USER dwebapi +EXPOSE 8080 9090 11000 + # Node options to use openssl CA certificates ENV NODE_OPTIONS="--import=extensionless/register --use-openssl-ca" -CMD ["dist/index.js"] \ No newline at end of file +CMD ["packages/dweb-api-server/dist/index.js"] diff --git a/README.md b/README.md index 39db408..714c0f4 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,9 @@ -# [eth.limo](https://eth.limo) dWeb Gateway API +# ENS dWeb Gateway API -Backend service API for use with reverse proxies to deploy an HTTP [ENS](https://ens.domains) or [GNS](https://genomedomains.com/) gateway capable of resolving [IPFS](https://docs.ipfs.tech/), [IPNS](https://docs.ipfs.tech/how-to/publish-ipns/), [Arweave](https://www.arweave.org/), [Arweave Naming Service (ARNS)](https://docs.ar.io/arns/#overview), and [Swarm](https://www.ethswarm.org/) content. +Backend service API for use with reverse proxies to deploy an HTTP [ENS](https://ens.domains)/[GNS](https://genomedomains.com/) gateway capable of resolving [IPFS](https://docs.ipfs.tech/), [IPNS](https://docs.ipfs.tech/how-to/publish-ipns/), [Arweave](https://www.arweave.org/), [Arweave Name System (ArNS)](https://docs.ar.io/arns/#overview), and [Swarm](https://www.ethswarm.org/) content. 
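For orientation, here is a minimal, hedged sketch of what a downstream consumer of this API might look like, based on the response-header contract described just below (`X-Content-Location` / `X-Content-Path`). It assumes the API is listening on the default `LISTEN_PORT` of `8888` and that the ENS/GNS hostname is forwarded via the `Host` header; the exact request shape is not spelled out in this excerpt, so treat both as assumptions rather than the project's documented interface.

```
import http from "node:http";

// Hedged sketch: assumes the dWeb proxy API listens on 127.0.0.1:8888 and that
// the ENS/GNS hostname is forwarded via the Host header (an assumption — the
// exact request shape is not shown in this README excerpt).
function resolveName(name: string): Promise<{ location?: string; path?: string }> {
  return new Promise((resolve, reject) => {
    const req = http.request(
      { host: "127.0.0.1", port: 8888, path: "/", headers: { Host: name } },
      (res) => {
        // Routing decisions are communicated via response headers.
        const loc = res.headers["x-content-location"];
        const p = res.headers["x-content-path"];
        resolve({
          location: Array.isArray(loc) ? loc[0] : loc,
          path: Array.isArray(p) ? p[0] : p,
        });
        res.resume(); // drain the body so the socket is released
      },
    );
    req.on("error", reject);
    req.end();
  });
}

// An upstream proxy would use these values to rewrite the request,
// e.g. proxying to https://${location}${path}.
resolveName("ens.eth").then(console.log);
```

In practice the API is driven by a reverse proxy (see the Caddy example later in this README); the snippet above only illustrates the header contract.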
-Upstream proxies can forward ENS and GNS hostnames for resolution and properly route them to the appropriate storage gateway path and destination via the following response headers: +Upstream proxies can forward ENS and GNS hostnames for resolution and properly route them to the appropriate storage gateway path and destination via the following response headers (IPFS example below): -IPFS example: ``` X-Content-Location: ${cid}.ipfs.dweb.link X-Content-Path: / @@ -15,17 +14,17 @@ __Gateway request flow__ ![alt text](./images/flow.jpg "Example resolution and request data flow") -### Configuration +## Configuration -| Environment Variable | Default | Purpose | +| Environment Variable | Default | Description | | ------------- |:-------------:| -----:| | `LISTEN_PORT` | `8888` | Proxy API listener port. | -| `IPFS_SUBDOMAIN_SUPPORT` | `"false"` | Return IPFS gateway destination in subdomain format, i.e. `${cid\|peerId}.${ipfs\|ipns}.dweb.link`. Otherwise results are returned as `dweb.link/ipfs/${cid}`. | +| `IPFS_SUBDOMAIN_SUPPORT` | `"false"` | Return IPFS gateway destination in subdomain format, i.e. `${cid\|peerId}.${ipfs\|ipns}.dweb.link`. Otherwise results are returned as `dweb.link/ipfs/${cid}`. Note that dweb.link is just an example and not a default value in this context. Please see `IPFS_TARGET` for more information.| | `IPFS_AUTH_KEY` | `null` | Basic authentication for `IPFS_KUBO_API_URL`. | | `IPFS_KUBO_API_URL` | `undefined` | URL to Kubo `/api/v0/name/resolve` service. This setting performs IPNS name resolution and PeerId conversion to CIDv1 identifiers during the contentHash lookup process. Note, this does not enable or disable IPNS support (as this is performed by the IPFS backend) but rather attempts to use resolved CID values as cache keys as opposed to peerIds. Please read the official IPFS [documentation](https://docs.ipfs.tech/reference/kubo/rpc/#api-v0-name-resolve) for more information. | | `ARWEAVE_TARGET` | `"https://arweave.net"` | Arweave gateway FQDN. | | `SWARM_TARGET` | `"https://api.gateway.ethswarm.org"` | Swarm gateway FQDN. | -| `IPFS_TARGET` | `http://127.0.0.1:8080` | FQDN of IPFS gateway backend to use for requests. | +| `IPFS_TARGET` | `http://localhost:8080` | FQDN of IPFS gateway backend to use for requests. | | `REDIS_URL` | `"redis://127.0.0.1:6379"` | Redis server endpoint. | | `CACHE_TTL` | `"300"` | TTL to persist resolved records | | `ASK_ENABLED` | `"false"` | Whether to spawn a special listener for responding to @@ -42,22 +41,31 @@ __Gateway request flow__ | `PURGE_CACHE_ON_START` | `"false"` | Indicates whether to purge the entire Redis cache upon server startup. | | `PURGE_CACHE_COUNT` | `"20000"` | Number of keys to purge if `PURGE_CACHE_ON_START` is enabled. | | `PURGE_CACHE_PATTERN` | `"*.${DOMAIN_TLD_HOSTNAME}"` | Key pattern to purge if `PURGE_CACHE_ON_START` is enabled. | +| `SW_BUNDLE_PUBLIC_URL` | `""` | Optional value if using service workers instead of the API. Set this to the parent wildcard domain you will be serving traffic from, i.e. setting this value to `eth.example.com` would support `ens.eth.example.com`, etc. | +| `SERVICE_WORKER_TRUSTLESS` | `"false"` | Optional value if using service workers instead of the API. Set this to `"true"` to enable [trustless IPFS gateway mode](https://specs.ipfs.tech/http-gateways/trustless-gateway/). You must also set `IPFS_TARGET` to the hostname of a gateway running in trustless mode. | -### Local Example +## Quickstart -1. Start Redis +1. 
Start Redis (using any method) ``` -podman run -p 127.0.0.1:6379:6379 docker.io/library/redis +podman pull docker.io/library/redis +podman run --net=host docker.io/library/redis ``` -2. Configure the necessary environment listed above +(Note you can also use `docker` instead of `podman`) -3. Start dWeb Proxy API +2. Configure the necessary environment variables listed above + +3. Start the ENS dWeb Proxy API ``` -npm install -npm run dev +./bin/build.sh + +# (optional) run test suites +npm run test + +./bin/runDev.sh ``` 4. Make a request @@ -76,7 +84,21 @@ Keep-Alive: timeout=5 Transfer-Encoding: chunked ``` -__Use with Caddy server as a local gateway__ +### Container example + +``` +podman pull docker.io/library/redis +podman run --net=host docker.io/library/redis + +buildah bud -t dweb-api-proxy . + +# Make sure to pass the necessary environment variables with "-e" flags +podman run --rm -it --net=host -e "ETH_RPC_ENDPOINT=${ETH_RPC_ENDPOINT}" dweb-api-proxy +``` + +(Note you can also use `docker` instead of `buildah`) + +### Running a local gateway with Caddy server Start `dweb-proxy-api` with the correct environment variables and install [Caddy server](https://github.com/caddyserver/caddy). @@ -150,4 +172,8 @@ For example, using `/etc/hosts`: ::1 localhost ens.eth ``` -Save the file, launch Caddy (`caddy run`) and then open a browser and navigate to `https://ens.eth:8443`. \ No newline at end of file +Save the file, launch Caddy (`caddy run`) and then open a browser and navigate to `https://ens.eth:8443`. + +## Service Workers + +All static assets for supporting service worker resolution are located in `packages/dweb-api-serviceworker/dist`. We recommend using an HTTP server such as Caddy or Nginx to serve this content (any CDN will work as well). The `SW_BUNDLE_PUBLIC_URL` environment variable should be set to the domain you will be serving traffic from. For example, if you are serving traffic from `*.eth.example.com`, set `SW_BUNDLE_PUBLIC_URL` to `eth.example.com` in order to resolve `ens.eth.example.com`. \ No newline at end of file diff --git a/bin/build.sh b/bin/build.sh new file mode 100755 index 0000000..a321c5f --- /dev/null +++ b/bin/build.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env sh +cd bin || exit 1 +packages=$(node ./list_packages.js) +cd .. 
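+# Install the root dependencies, then install and build each workspace package listed in package.json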
+npm i +echo "${PWD}" +for package in ${packages}; do + echo "Building ${package}" + npm i -w "${package}" + npm run build -w "${package}" +done diff --git a/bin/list_packages.js b/bin/list_packages.js new file mode 100644 index 0000000..c620b0c --- /dev/null +++ b/bin/list_packages.js @@ -0,0 +1,6 @@ +import pkg from "../package.json" with {type: "json"} + + +pkg.workspaces.forEach((workspace) => { + console.log(workspace) +}); \ No newline at end of file diff --git a/bin/package.json b/bin/package.json new file mode 100644 index 0000000..4cca935 --- /dev/null +++ b/bin/package.json @@ -0,0 +1,10 @@ +{ + "name": "bin", + "version": "1.0.0", + "description": "this only exists for json imports", + "type": "module", + "author": "Your Name", + "license": "MIT", + "dependencies": { + } +} \ No newline at end of file diff --git a/bin/runDev.sh b/bin/runDev.sh new file mode 100755 index 0000000..5ccf0d3 --- /dev/null +++ b/bin/runDev.sh @@ -0,0 +1,9 @@ +#!/bin/env bash + +IPFS_SUBDOMAIN_SUPPORT=true +ASK_ENABLED=true +DNSQUERY_ENABLED=true + +npm i +npm i --workspaces +npm run dev -w packages/dweb-api-server \ No newline at end of file diff --git a/package.json b/package.json index eae1a67..6456e69 100644 --- a/package.json +++ b/package.json @@ -1,76 +1,51 @@ { - "name": "dweb-api", - "version": "1.0.0", - "description": "Proxy middleware for ENS and other on chain naming services", - "main": "index.js", - "scripts": { - "dev": "nodemon -e js,ts --watch src --exec \"npx tsx src/index.ts\"", - "fmt": "prettier src -w", - "test": "mocha --exit", - "test:coverage": "nyc mocha --exit", - "build": "tsc", - "run-build": "node --import=extensionless/register dist/index.js" - }, - "author": "eth.limo team", - "license": "MIT", - "dependencies": { - "@ensdomains/content-hash": "^3.0.0-beta.5", - "@libp2p/peer-id": "^4.0.9", - "@types/dns-packet": "^5.2.4", - "@types/superagent": "^8.1.1", - "@web3-name-sdk/core": "^0.1.18", - "axios": "^1.4.0", - "cors": "^2.8.5", - "dns-packet": "^5.6.0", - "ethers": "^6.11.1", - "express": "^4.18.2", - "extensionless": "^1.9.6", - "inversify": "^6.0.1", - "ioredis": "^5.3.2", - "multiformats": "^13.0.0", - "node-cache": "^5.1.2", - "nyc": "^15.1.0", - "pkg": "^5.8.1", - "punycode": "^2.3.0", - "re2": "^1.19.0", - "redis-lock": "^1.0.0", - "redlock": "^5.0.0-beta.2", - "reflect-metadata": "^0.2.1", - "rfc4648": "^1.5.3", - "superagent": "^9.0.2", - "typescript": "^5.1.3", - "typeserializer": "^0.2.5", - "url-regex-safe": "^4.0.0", - "viem": "^1.21.4", - "warp-contracts": "^1.4.25", - "winston": "^3.9.0", - "zod": "^3.22.4" - }, - "devDependencies": { - "@types/chai": "^4.3.5", - "@types/cors": "^2.8.13", - "@types/express": "^4.17.17", - "@types/http-proxy": "^1.17.11", - "@types/mocha": "^10.0.1", - "@types/node": "^20.3.1", - "@types/punycode": "^2.1.3", - "@types/sinon": "^17.0.2", - "@types/sinon-chai": "^3.2.12", - "@types/url-regex-safe": "^1.0.0", - "chai": "^4.3.7", - "ioredis-mock": "^8.9.0", - "mocha": "^10.2.0", - "node-mocks-http": "^1.14.1", - "nodemon": "^3.0.1", - "prettier": "^3.1.0", - "sinon": "^17.0.1", - "sinon-chai": "^3.7.0", - "ts-mocha": "^10.0.0", - "ts-node": "^10.9.1", - "tsx": "^4.6.1" - }, - "overrides": { - "uint8arrays": "5.0.2" - }, - "type": "module" + "name": "dweb-api", + "version": "1.0.0", + "description": "Proxy middleware for ENS and other on chain naming services", + "author": "eth.limo team", + "license": "MIT", + "workspaces": [ + "packages/dweb-api-types", + "packages/dweb-api-logger", + "packages/dweb-api-cache", + 
"packages/dweb-api-resolver", + "packages/dweb-api-server", + "packages/dweb-api-serviceworker" + ], + "scripts": { + "test": "npm run test --workspace packages/dweb-api-server", + "clean": "rm -rf dist; rm -rf packages/*/dist; rm -rf node_modules; rm -rf packages/*/node_modules", + "build-all": "sh bin/build.sh" + }, + "dependencies": { + "node-polyfill-webpack-plugin": "^4.0.0", + "pkg": "^5.8.1", + "re2": "^1.19.0", + "rfc4648": "^1.5.3", + "winston": "^3.14.0", + "zod": "^3.22.4" + }, + "devDependencies": { + "@types/chai": "^4.3.5", + "@types/mocha": "^10.0.1", + "@types/node": "^20.3.1", + "@types/sinon": "^17.0.2", + "@types/sinon-chai": "^3.2.12", + "chai": "^4.3.7", + "dweb-api-types": "^1.0.0", + "mocha": "^10.2.0", + "nodemon": "^3.0.1", + "nyc": "^17.0.0", + "prettier": "^3.1.0", + "sinon": "^18.0.1", + "sinon-chai": "^3.7.0", + "ts-mocha": "^10.0.0", + "ts-node": "^10.9.1", + "tsx": "^4.6.1", + "typescript": "^5.1.3" + }, + "overrides": { + "uint8arrays": "5.1.0", + "axios": "1.8.4" + } } diff --git a/packages/dweb-api-cache/package.json b/packages/dweb-api-cache/package.json new file mode 100644 index 0000000..086dd20 --- /dev/null +++ b/packages/dweb-api-cache/package.json @@ -0,0 +1,18 @@ +{ + "name": "dweb-api-cache", + "version": "1.0.0", + "description": "Caching strategies for dweb-api-cache", + "scripts": { + "fmt": "prettier src -w", + "test": "mocha --exit", + "test:coverage": "nyc mocha --exit", + "build": "tsc" + }, + "author": "eth.limo team", + "license": "MIT", + "dependencies": { + "dweb-api-serviceworker": "file:../dweb-api-serviceworker", + "dweb-api-types": "file:../dweb-api-types" + }, + "type": "module" +} diff --git a/packages/dweb-api-cache/src/browser.ts b/packages/dweb-api-cache/src/browser.ts new file mode 100644 index 0000000..93ff0ba --- /dev/null +++ b/packages/dweb-api-cache/src/browser.ts @@ -0,0 +1,245 @@ +import { ICacheService } from "dweb-api-types/dist/cache.js"; +import { IRequestContext } from "dweb-api-types/dist/request-context"; +import { ILoggerService } from "dweb-api-types/dist/logger"; +import { ZodType } from "zod"; + +export const DEFAULT_TTL = 24 * 60 * 60 * 1000; // 1 day in milliseconds + +export class IndexedDbCacheService implements ICacheService { + private db: IDBDatabase | null = null; + private _logger: ILoggerService; + + constructor(logger: ILoggerService) { + this._logger = logger; + this.openDatabase(); + } + + private async openDatabase(): Promise { + return new Promise((resolve, reject) => { + const request = indexedDB.open("cache-db", 1); + + request.onupgradeneeded = (event) => { + const db = (event.target as IDBOpenDBRequest).result; + db.createObjectStore("cache-store", { keyPath: "key" }); + }; + + request.onsuccess = (event) => { + this.db = (event.target as IDBOpenDBRequest).result; + this._logger.info("IndexedDB opened successfully", { + origin: "IndexedDbCacheService", + trace_id: "open-db", + }); + resolve(); + }; + + request.onerror = (event) => { + this._logger.error("Failed to open IndexedDB", { + origin: "IndexedDbCacheService", + trace_id: "open-db", + context: { error: (event.target as IDBOpenDBRequest).error }, + }); + reject((event.target as IDBOpenDBRequest).error); + }; + }); + } + + private async storeInCache( + request: IRequestContext, + dbPrefix: string, + key: string, + value: RT, + ttl: number = DEFAULT_TTL, + ): Promise { + const now = Date.now(); + const expirationTime = now + ttl; + + const data = { + key: `${dbPrefix}-${key}`, + value: value, + expiration: expirationTime, + }; + + if 
(!this.db) await this.openDatabase(); + + try { + const tx = this.db!.transaction("cache-store", "readwrite"); + const store = tx.objectStore("cache-store"); + await store.put(data); + this._logger.debug("Data stored in cache", { + origin: "IndexedDbCacheService", + ...request, + context: { key, dbPrefix, expirationTime }, + }); + } catch (error) { + this._logger.error("Failed to store data in cache", { + origin: "IndexedDbCacheService", + ...request, + context: { error }, + }); + throw error; + } + } + + private async retrieveFromCache( + request: IRequestContext, + dbPrefix: string, + key: string, + ): Promise { + if (!this.db) await this.openDatabase(); + + try { + const tx = this.db!.transaction("cache-store", "readonly"); + const store = tx.objectStore("cache-store"); + const data = (await store.get(`${dbPrefix}-${key}`)) as unknown as + | { + key: string; + value: RT; + expiration: number; + } + | undefined; + + if (!data) { + this._logger.info("Cache miss", { + origin: "IndexedDbCacheService", + ...request, + context: { key, dbPrefix }, + }); + return null; + } + + const now = Date.now(); + if (now > data.expiration) { + await this.deleteFromCache(request, dbPrefix, key); + this._logger.info("Cache entry expired", { + origin: "IndexedDbCacheService", + ...request, + context: { key, dbPrefix }, + }); + return null; + } + + this._logger.debug("Cache hit", { + origin: "IndexedDbCacheService", + ...request, + context: { key, dbPrefix }, + }); + return data.value as RT; + } catch (error) { + this._logger.error("Failed to retrieve data from cache", { + origin: "IndexedDbCacheService", + ...request, + context: { error }, + }); + throw error; + } + } + + private async deleteFromCache( + request: IRequestContext, + dbPrefix: string, + key: string, + ): Promise { + if (!this.db) await this.openDatabase(); + + try { + const tx = this.db!.transaction("cache-store", "readwrite"); + const store = tx.objectStore("cache-store"); + await store.delete(`${dbPrefix}-${key}`); + this._logger.debug("Cache entry deleted", { + origin: "IndexedDbCacheService", + ...request, + context: { key, dbPrefix }, + }); + } catch (error) { + this._logger.error("Failed to delete cache entry", { + origin: "IndexedDbCacheService", + ...request, + context: { error }, + }); + throw error; + } + } + + async memoize( + request: IRequestContext, + fThunk: () => Promise>, + schema: ZodType, + dbPrefix: string, + key: string, + ): Promise> { + try { + const cachedData = await this.retrieveFromCache( + request, + dbPrefix, + key, + ); + if (cachedData) { + // Validate cached data against the schema + const parsedData = schema.parse(cachedData); + return await parsedData; + } + } catch (error) { + this._logger.warn( + "Cache retrieval or validation failed, executing fThunk", + { + origin: "IndexedDbCacheService", + ...request, + context: { key, dbPrefix, error }, + }, + ); + return await fThunk(); + } + + try { + // If not in cache or validation fails, execute the function + const result = await fThunk(); + await this.storeInCache(request, dbPrefix, key, result); + + return result; + } catch (error) { + this._logger.error("fThunk execution failed", { + origin: "IndexedDbCacheService", + ...request, + context: { error }, + }); + throw error; // Propagate the error if fThunk throws + } + } + + async getTtl( + request: IRequestContext, + dbPrefix: string, + key: string, + ): Promise { + if (!this.db) await this.openDatabase(); + + try { + const tx = this.db!.transaction("cache-store", "readonly"); + const store = 
tx.objectStore("cache-store"); + const data = (await store.get(`${dbPrefix}-${key}`)) as unknown as + | { + key: string; + value: unknown; + expiration: number; + } + | undefined; + + if (!data) return undefined; + + const now = Date.now(); + if (now > data.expiration) { + await this.deleteFromCache(request, dbPrefix, key); + return undefined; + } + + return data.expiration - now; + } catch (error) { + this._logger.error("Failed to get TTL", { + origin: "IndexedDbCacheService", + ...request, + context: { error }, + }); + return undefined; + } + } +} diff --git a/src/services/CacheService/index.ts b/packages/dweb-api-cache/src/index.ts similarity index 56% rename from src/services/CacheService/index.ts rename to packages/dweb-api-cache/src/index.ts index 26117be..88983c8 100644 --- a/src/services/CacheService/index.ts +++ b/packages/dweb-api-cache/src/index.ts @@ -1,45 +1,43 @@ import { Redis } from "ioredis"; -import { inject, injectable, interfaces } from "inversify"; import { serialize } from "typeserializer"; import NodeCache from "node-cache"; -import { ILoggerService } from "../LoggerService"; -import { DITYPES } from "../../dependencies/types"; -import { ZodType } from "zod"; -import { IConfigurationService } from "../../configuration"; +import { ILoggerService } from "dweb-api-types/dist/logger.js"; import redisMock from "ioredis-mock"; -import { IRequestContext } from "../lib"; +import { IRequestContext } from "dweb-api-types/dist/request-context.js"; +import { IRedisClient } from "dweb-api-types/dist/redis.js"; +import { ICacheService, INamedMemoryCache } from "dweb-api-types/dist/cache.js"; +import { ZodType } from "zod"; +import { ICacheConfig, IRedisConfig } from "dweb-api-types/dist/config.js"; interface TheRedisPartsWeUse { - get: typeof Redis.prototype.get, - set: typeof Redis.prototype.set, - ttl: (key: string) => Promise, - incr: typeof Redis.prototype.incr, - expire: typeof Redis.prototype.expire, -} - -export interface IRedisClient { - get(key: string): Promise; - set(key: string, value: string, duration: number): Promise; - ttl(key: string): Promise; - expire(key: string, duration: number): Promise; - incr(key: string): Promise; + get: typeof Redis.prototype.get; + set: typeof Redis.prototype.set; + ttl: (key: string) => Promise; + incr: typeof Redis.prototype.incr; + expire: typeof Redis.prototype.expire; } -@injectable() -export class AbstractRedisClient implements IRedisClient { +export abstract class AbstractRedisClient implements IRedisClient { _redis: TheRedisPartsWeUse; - _configurationService: IConfigurationService; + _configurationService: IRedisConfig; _timeout = 1000; private _wait(ms: number, reason: string) { - return new Promise((_, reject) => setTimeout(() => reject(new Error(`AbstractRedisClient timeout: ${reason}`)), ms)); + return new Promise((_, reject) => + setTimeout( + () => reject(new Error(`AbstractRedisClient timeout: ${reason}`)), + ms, + ), + ); } - private _racePromise(ms: number, reason: string, promise: Promise) { - return Promise.race([promise, this._wait(ms, reason)]) as Promise; + constructor(_redis: TheRedisPartsWeUse, _configurationService: IRedisConfig) { + this._redis = _redis; + this._configurationService = _configurationService; } - constructor() { + private _racePromise(ms: number, reason: string, promise: Promise) { + return Promise.race([promise, this._wait(ms, reason)]) as Promise; } async get(key: string): Promise { @@ -47,85 +45,81 @@ export class AbstractRedisClient implements IRedisClient { } async set(key: string, value: 
string, duration: number): Promise<"OK"> { - return this._racePromise(this._timeout, `set ${key}=${value} (duration: ${duration})`, this._redis.set(key, value, "EX", duration)); + return this._racePromise( + this._timeout, + `set ${key}=${value} (duration: ${duration})`, + this._redis.set(key, value, "EX", duration), + ); } async ttl(key: string): Promise { return this._racePromise(this._timeout, `ttl ${key}`, this._redis.ttl(key)); } async incr(key: string): Promise { - return this._racePromise(this._timeout, `incr ${key}`, this._redis.incr(key)); - }; + return this._racePromise( + this._timeout, + `incr ${key}`, + this._redis.incr(key), + ); + } async expire(key: string, duration: number): Promise { - return this._racePromise(this._timeout, `expire ${key} ${duration}`, this._redis.expire(key, duration)); + return this._racePromise( + this._timeout, + `expire ${key} ${duration}`, + this._redis.expire(key, duration), + ); } } -export interface INamedMemoryCache { - getServiceName(): string; - put: (request: IRequestContext, key: string, v: T, ttl?: Number) => void; - get: (request: IRequestContext, key: string) => Promise; - getTtl: (request: IRequestContext, key: string) => Promise; -} - export class MemoryCacheFactory { memoryCaches = new Map(); createNamedMemoryCacheFactory = ( - context: interfaces.Context, serviceName: string, + logger: ILoggerService, + configurationService: ICacheConfig, ): INamedMemoryCache => { if (this.memoryCaches.has(serviceName)) { return this.memoryCaches.get(serviceName)!; } else { - const namedMemoryCache = this.memoryCaches.get(serviceName); - if (namedMemoryCache) { - return namedMemoryCache; - } else { - const v = new NamedMemoryCache( - context.container.get(DITYPES.LoggerService), - serviceName, - context.container.get(DITYPES.ConfigurationService), - ); - this.memoryCaches.set(serviceName, v); - return v; - } + const memoryCache = new NamedMemoryCache( + logger, + serviceName, + configurationService, + ); + this.memoryCaches.set(serviceName, memoryCache); + return memoryCache; } }; } - - -@injectable() export class RedisClient extends AbstractRedisClient { - constructor(@inject(DITYPES.ConfigurationService) configurationService: IConfigurationService) { - super(); - this._redis = new Redis(configurationService.get().redis.url); - this._configurationService = configurationService; + constructor(configurationService: IRedisConfig) { + const redis = new Redis(configurationService.getRedisConfig().getUrl()); + super(redis, configurationService); } } -@injectable() export class TestRedisClient extends AbstractRedisClient { mappings = new Map(); //this is gross but it's necessary because the test suite does early binding of the server service - proxy: AbstractRedisClient | null; + proxy: AbstractRedisClient | null = null; + + constructor(configurationService: IRedisConfig) { + const redis = new (redisMock as any)(); + super(redis, configurationService); - constructor(@inject(DITYPES.ConfigurationService) configurationService: IConfigurationService) { - super(); - this._configurationService = configurationService; - this._redis = new redisMock(); } async get(key: string): Promise { - if(this.proxy) { + if (this.proxy) { return this.proxy.get(key); } return this.mappings.get(key) || null; } async set(key: string, value: string, duration: number): Promise<"OK"> { - if(this.proxy) { + if (this.proxy) { return this.proxy.set(key, value, duration); } this.mappings.set(key, value); @@ -133,21 +127,21 @@ export class TestRedisClient extends AbstractRedisClient { } 
async ttl(key: string): Promise { - if(this.proxy) { + if (this.proxy) { return this.proxy.ttl(key); } return 69; } async incr(key: string): Promise { - if(this.proxy) { + if (this.proxy) { return this.proxy.incr(key); } return 70; } async expire(key: string, duration: number): Promise { - if(this.proxy) { + if (this.proxy) { return this.proxy.expire(key, duration); } return 71; @@ -158,24 +152,31 @@ export class TestRedisClient extends AbstractRedisClient { } } -@injectable() export class NamedMemoryCache implements INamedMemoryCache { - _configurationService: IConfigurationService; + _configurationService: ICacheConfig; private _cache: NodeCache; private _logger: ILoggerService; private _serviceName: string; - public async put(request: IRequestContext, key: string, v: T, ttl?: number) { - const configuration = this._configurationService.get(); - this._logger.debug(`interning ${key}`, { ...request, origin: "NamedMemoryCache" }); + public async put( + request: IRequestContext, + key: string, + v: T, + ttl?: number, + ) { + const configuration = this._configurationService.getCacheConfig(); + this._logger.debug(`interning ${key}`, { + ...request, + origin: "NamedMemoryCache", + }); if (ttl) { this._cache.set( key, v, - Math.min(Math.max(ttl, 1), configuration.cache.ttl), + Math.min(Math.max(ttl, 1), configuration.getTtl()), ); } else { - this._cache.set(key, v, configuration.cache.ttl); + this._cache.set(key, v, configuration.getTtl()); } } async get(request: IRequestContext, key: string): Promise { @@ -188,50 +189,45 @@ export class NamedMemoryCache implements INamedMemoryCache { } public constructor( - @inject(DITYPES.LoggerService) logger: ILoggerService, - @inject("serviceName") serviceName: string, - @inject(DITYPES.ConfigurationService) configurationService: IConfigurationService, + logger: ILoggerService, + serviceName: string, + configurationService: ICacheConfig, ) { + this._configurationService = configurationService; this._logger = logger; this._serviceName = serviceName; - this._configurationService = configurationService; this._cache = new NodeCache({ - stdTTL: this._configurationService.get().cache.ttl, + stdTTL: this._configurationService.getCacheConfig().getTtl(), }); this._cache.on("expired", function (key, value) { - logger.info("expired key", {origin: "NodeCache expired", trace_id: "N/A", context: { - key: key, - value: value - }}); //implements #13 + logger.info("expired key", { + origin: "NodeCache expired", + trace_id: "N/A", + context: { + key: key, + value: value, + }, + }); //implements #13 }); } - public async getTtl(request: IRequestContext, key: string): Promise { - return this._cache.getTtl(key); - } -} - -export interface ICacheService { - memoize: ( + public async getTtl( request: IRequestContext, - fThunk: () => Promise>, - schema: ZodType, - dbPrefix: string, key: string, - ) => Promise>; - getTtl: (request: IRequestContext, dbPrefix: string, key: string) => Promise; + ): Promise { + return this._cache.getTtl(key); + } } -@injectable() export class RedisCacheService implements ICacheService { private _redisClient: IRedisClient; _logger: ILoggerService; - _configurationService: IConfigurationService; + _configurationService: IRedisConfig & ICacheConfig; public constructor( - @inject(DITYPES.LoggerService) logger: ILoggerService, - @inject(DITYPES.RedisClient) redisClient: IRedisClient, - @inject(DITYPES.ConfigurationService) configurationService: IConfigurationService, + logger: ILoggerService, + redisClient: IRedisClient, + configurationService: IRedisConfig 
& ICacheConfig, ) { this._logger = logger; this._redisClient = redisClient; @@ -245,9 +241,9 @@ export class RedisCacheService implements ICacheService { dbPrefix: string, key: string, ): Promise> { - const configuration = this._configurationService.get(); + const cacheConfig = this._configurationService.getCacheConfig(); const cKey = `${dbPrefix}/${key}`; - const defaultTtl = configuration.cache.ttl; + const defaultTtl = cacheConfig.getTtl(); try { var cachedValue = await this._redisClient.get(cKey); const redisTTL = await this._redisClient.ttl(cKey); @@ -270,8 +266,8 @@ export class RedisCacheService implements ICacheService { context: { key: cKey, value: cachedValue, - } - } + }, + }, ); cachedValue = null; } @@ -279,70 +275,70 @@ export class RedisCacheService implements ICacheService { const result = await fThunk(); try { - this._logger.info( - 'Setting cache value', - { - ...request, - origin: "RedisCacheService", - context: { - key: cKey, - value: result, - ttl: ttl, - } - } - ); + this._logger.info("Setting cache value", { + ...request, + origin: "RedisCacheService", + context: { + key: cKey, + value: result, + ttl: ttl, + }, + }); await this._redisClient.set(cKey, serialize(result), ttl); } catch (e) { - this._logger.error('Failed to set cache value', { + this._logger.error("Failed to set cache value", { ...request, origin: "RedisCacheService", context: { key: cKey, value: result, ttl: ttl, - } + }, }); } return result; } catch (e) { - this._logger.error( - 'received error when querying cache', - { - ...request, - origin: "RedisCacheService", - context: { - key: cKey, - error: e, - } - } - ); + this._logger.error("received error when querying cache", { + ...request, + origin: "RedisCacheService", + context: { + key: cKey, + error: e, + }, + }); return fThunk(); } } - public async getTtl(request: IRequestContext, dbPrefix: string, key: string): Promise { + public async getTtl( + request: IRequestContext, + dbPrefix: string, + key: string, + ): Promise { const cKey = `${dbPrefix}/${key}`; return await this._redisClient.ttl(cKey); } } -@injectable() export class LocallyCachedRedisCacheService implements ICacheService { _innerRedis: ICacheService; _innerMemoryCache: INamedMemoryCache; _logger: ILoggerService; - _configurationService: IConfigurationService; + _configurationService: IRedisConfig & ICacheConfig; public constructor( - @inject(DITYPES.LoggerService) logger: ILoggerService, - @inject(DITYPES.RedisClient) redisClient: IRedisClient, - @inject(DITYPES.NamedMemoryCacheFactory) + logger: ILoggerService, + redisClient: IRedisClient, innerMemoryCacheFactory: (str: string) => INamedMemoryCache, - @inject(DITYPES.ConfigurationService) configurationService: IConfigurationService, + configurationService: IRedisConfig & ICacheConfig, ) { this._logger = logger; this._configurationService = configurationService; - this._innerRedis = new RedisCacheService(logger, redisClient, configurationService); + this._innerRedis = new RedisCacheService( + logger, + redisClient, + configurationService, + ); this._innerMemoryCache = innerMemoryCacheFactory("LocallyCachedRedisCache"); } @@ -353,23 +349,23 @@ export class LocallyCachedRedisCacheService implements ICacheService { dbPrefix: string, key: string, ): Promise> { - const configuration = this._configurationService.get(); + const cacheConfig = this._configurationService.getCacheConfig(); const cKey = `${dbPrefix}/${key}`; - const defaultTtl = configuration.cache.ttl; + const defaultTtl = cacheConfig.getTtl(); try { const cachedValue = await 
this._innerMemoryCache.get(request, cKey); if (cachedValue) { this._logger.info( - 'LocallyCachedRedisCacheService: returning cached value memory cache', + "LocallyCachedRedisCacheService: returning cached value memory cache", { ...request, origin: "LocallyCachedRedisCacheService", context: { key: cKey, value: cachedValue, - } - } + }, + }, ); return cachedValue; } @@ -385,28 +381,25 @@ export class LocallyCachedRedisCacheService implements ICacheService { (await this._innerRedis.getTtl(request, dbPrefix, key)) || defaultTtl, defaultTtl, ); - this._logger.info( - 'setting cached value for via memory cache', - { - ...request, - origin: "LocallyCachedRedisCacheService", - context: { - key: cKey, - value: result, - ttl: ttl, - } - } - ); + this._logger.info("setting cached value for via memory cache", { + ...request, + origin: "LocallyCachedRedisCacheService", + context: { + key: cKey, + value: result, + ttl: ttl, + }, + }); this._innerMemoryCache.put(request, cKey, result, ttl); return result; } catch (e) { - this._logger.error('memoize error', { + this._logger.error("memoize error", { ...request, origin: "LocallyCachedRedisCacheService", context: { key: cKey, error: e, - } + }, }); return fThunk(); } @@ -429,9 +422,10 @@ export class LocallyCachedRedisCacheService implements ICacheService { } class TestLaggyRedisClientInnerRedis implements TheRedisPartsWeUse { - inner = () => new Promise((_, reject) => { - setTimeout(() => reject("Error: timeout"), 100000); - }); + inner = () => + new Promise((_, reject) => { + setTimeout(() => reject("Error: timeout"), 100000); + }); get(key: string): Promise { return this.inner() as any; @@ -455,9 +449,9 @@ class TestLaggyRedisClientInnerRedis implements TheRedisPartsWeUse { } export class TestLaggyRedisClientProxy extends AbstractRedisClient { - constructor() { - super(); - this._redis = new TestLaggyRedisClientInnerRedis(); + constructor(configurationService: IRedisConfig) { + const redis = new TestLaggyRedisClientInnerRedis(); + super(redis, configurationService); this._timeout = 5; } -} \ No newline at end of file +} diff --git a/packages/dweb-api-cache/src/passthrough.ts b/packages/dweb-api-cache/src/passthrough.ts new file mode 100644 index 0000000..a41bc89 --- /dev/null +++ b/packages/dweb-api-cache/src/passthrough.ts @@ -0,0 +1,25 @@ +import { ICacheService } from "dweb-api-types/dist/cache.js"; +import { IRequestContext } from "dweb-api-types/dist/request-context"; +import { ZodType } from "zod"; + +export class PassthroughCacheService implements ICacheService { + async memoize( + _request: IRequestContext, + fThunk: () => Promise>, + _schema: ZodType, + _dbPrefix: string, + _key: string, + ): Promise> { + // Directly execute fThunk and return the result, without caching + return await fThunk(); + } + + async getTtl( + _request: IRequestContext, + _dbPrefix: string, + _key: string, + ): Promise { + // Since this is a passthrough, there is no TTL to return + return undefined; + } +} diff --git a/packages/dweb-api-cache/tsconfig.json b/packages/dweb-api-cache/tsconfig.json new file mode 100644 index 0000000..d3de9eb --- /dev/null +++ b/packages/dweb-api-cache/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "../../tsconfig.base.json", + "exclude": [ ], + "include": [ + "./src/**/*.ts" + ], + "compilerOptions": { + "rootDir": "./src", + "outDir": "./dist", + "baseUrl": "./src", + "moduleResolution": "bundler", + "module": "ES2022" + }, +} \ No newline at end of file diff --git a/packages/dweb-api-logger/package.json 
b/packages/dweb-api-logger/package.json new file mode 100644 index 0000000..020c7a5 --- /dev/null +++ b/packages/dweb-api-logger/package.json @@ -0,0 +1,12 @@ +{ + "name": "dweb-api-logger", + "version": "1.0.0", + "description": "Logger for dweb-api", + "scripts": { + "fmt": "prettier src -w", + "build": "tsc" + }, + "author": "eth.limo team", + "license": "MIT", + "type": "module" +} diff --git a/packages/dweb-api-logger/src/index.ts b/packages/dweb-api-logger/src/index.ts new file mode 100644 index 0000000..8d72bae --- /dev/null +++ b/packages/dweb-api-logger/src/index.ts @@ -0,0 +1,95 @@ +import { createLogger, format, transports, Logger } from "winston"; +import { + ILoggerService, + ILoggerServiceContext, +} from "dweb-api-types/dist/logger"; +import { IConfigurationLogger } from "dweb-api-types/dist/config"; + +export class LoggerService implements ILoggerService { + _configurationService: IConfigurationLogger; + _logger: Logger; + + constructor(configurationService: IConfigurationLogger) { + this._configurationService = configurationService; + this._logger = createLogger({ + level: this._configurationService.getLoggerConfig().getLevel(), + format: format.json(), + defaultMeta: { service: "limo-proxy" }, + }).add( + new transports.Console({ + format: format.json(), + }), + ); + } + + internal_log( + severity: "warn" | "error" | "info" | "debug", + msg: string, + context: ILoggerServiceContext, + ) { + this._logger.log({ + level: severity, + message: msg, + ...context, + }); + } + + public warn(msg: string, context: ILoggerServiceContext) { + this.internal_log("warn", msg, context); + } + public error(msg: string, context: ILoggerServiceContext) { + this.internal_log("error", msg, context); + } + public info(msg: string, context: ILoggerServiceContext) { + this.internal_log("info", msg, context); + } + public debug(msg: string, context: ILoggerServiceContext) { + this.internal_log("debug", msg, context); + } +} + +type TestLoggerServiceEnum = "warn" | "error" | "info" | "debug"; +type TestLoggerServiceMsg = { + severity: TestLoggerServiceEnum; + message: string; + ctx: any; +}; + +/** + * This is a test logger service for configurable squelching of logs + * when debugging tests, call logMessages before expect values to interrogate the log stack + * note: the debug configuration should automatically set log level to DEBUG so reusing the logger is fine + */ +export class TestLoggerService implements ILoggerService { + _configurationService: IConfigurationLogger; + _logger: ILoggerService; + msgs: TestLoggerServiceMsg[] = []; + constructor(configurationService: IConfigurationLogger) { + this._configurationService = configurationService; + this._logger = new LoggerService(configurationService); + } + + public warn(msg: string) { + this.msgs.push({ severity: "warn", message: msg, ctx: null }); + } + public error(msg: string, ctx: any) { + this.msgs.push({ severity: "error", message: msg, ctx: ctx }); + } + public info(msg: string) { + this.msgs.push({ severity: "info", message: msg, ctx: null }); + } + public debug(msg: string) { + this.msgs.push({ severity: "debug", message: msg, ctx: null }); + } + public logMessages() { + for (let i = 0; i < this.msgs.length; i++) { + let msg = this.msgs[i]; + this._logger[msg.severity](msg.message, msg.ctx); + } + this.clearMessages(); + } + + public clearMessages() { + this.msgs = []; + } +} diff --git a/packages/dweb-api-logger/src/jsonlogger.ts b/packages/dweb-api-logger/src/jsonlogger.ts new file mode 100644 index 0000000..c807fd5 --- /dev/null 
+++ b/packages/dweb-api-logger/src/jsonlogger.ts @@ -0,0 +1,38 @@ +import { + ILoggerService, + ILoggerServiceContext, +} from "dweb-api-types/dist/logger"; + +export class JsonLoggerService implements ILoggerService { + private log( + level: string, + message: string, + context: ILoggerServiceContext, + ): void { + const logEntry = { + timestamp: new Date().toISOString(), + level, + message, + origin: context.origin, + trace_id: context.trace_id, + context: context.context || {}, + }; + console.log(JSON.stringify(logEntry)); + } + + error(message: string, context: ILoggerServiceContext): void { + this.log("error", message, context); + } + + warn(message: string, context: ILoggerServiceContext): void { + this.log("warn", message, context); + } + + info(message: string, context: ILoggerServiceContext): void { + this.log("info", message, context); + } + + debug(message: string, context: ILoggerServiceContext): void { + this.log("debug", message, context); + } +} diff --git a/packages/dweb-api-logger/tsconfig.json b/packages/dweb-api-logger/tsconfig.json new file mode 100644 index 0000000..d3de9eb --- /dev/null +++ b/packages/dweb-api-logger/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "../../tsconfig.base.json", + "exclude": [ ], + "include": [ + "./src/**/*.ts" + ], + "compilerOptions": { + "rootDir": "./src", + "outDir": "./dist", + "baseUrl": "./src", + "moduleResolution": "bundler", + "module": "ES2022" + }, +} \ No newline at end of file diff --git a/packages/dweb-api-resolver/package.json b/packages/dweb-api-resolver/package.json new file mode 100644 index 0000000..63ef7ec --- /dev/null +++ b/packages/dweb-api-resolver/package.json @@ -0,0 +1,39 @@ +{ + "name": "dweb-api-resolver", + "version": "1.0.0", + "description": "ENS content hash resolution library for dweb-api", + "scripts": { + "fmt": "prettier src -w", + "test": "mocha --exit", + "test:coverage": "nyc mocha --exit", + "build": "tsc" + }, + "author": "eth.limo team", + "license": "MIT", + "dependencies": { + "@ensdomains/content-hash": "^3.0.0-beta.5", + "@libp2p/peer-id": "^4.0.9", + "@web3-name-sdk/core": "^0.2.0", + "ethers": "^6.13.3", + "multiformats": "^13.0.0", + "rfc4648": "^1.5.3" + }, + "devDependencies": { + "@types/chai": "^4.3.5", + "@types/mocha": "^10.0.1", + "@types/node": "^20.3.1", + "@types/sinon": "^17.0.2", + "@types/sinon-chai": "^3.2.12", + "chai": "^4.3.7", + "mocha": "^10.2.0", + "nodemon": "^3.0.1", + "prettier": "^3.1.0", + "sinon": "^18.0.1", + "sinon-chai": "^3.7.0", + "ts-mocha": "^10.0.0", + "ts-node": "^10.9.1", + "tsx": "^4.6.1", + "typescript": "^5.1.3" + }, + "type": "module" +} diff --git a/packages/dweb-api-resolver/src/HostnameSubstitutionService/index.ts b/packages/dweb-api-resolver/src/HostnameSubstitutionService/index.ts new file mode 100644 index 0000000..a4b10f1 --- /dev/null +++ b/packages/dweb-api-resolver/src/HostnameSubstitutionService/index.ts @@ -0,0 +1,126 @@ +import { + HostnameSubstitutionConfiguration, + IConfigHostnameSubstitution, +} from "dweb-api-types/dist/config"; +import { ILoggerService } from "dweb-api-types/dist/logger"; + +export interface IHostnameSubstitutionService { + substituteHostname(hostname: string): string; +} + +export class HostnameSubstitutionService + implements IHostnameSubstitutionService +{ + _configuration: HostnameSubstitutionConfiguration; + _logger: ILoggerService; + + constructor( + configurationService: IConfigHostnameSubstitution, + logger: ILoggerService, + ) { + const config = configurationService.getHostnameSubstitutionConfig(); + 
this._configuration = {}; + this._logger = logger; + const logger_context = { + origin: "HostnameSubstitutionService", + trace_id: "UNDEFINED_TRACE_ID", + }; + for (const key in config) { + if (typeof config[key] === "string") { + this._configuration[key] = config[key]; + logger.debug(`Registered suffix ${key}=${config[key]}`, { + ...logger_context, + context: { + key, + value: config[key], + configuration: this._configuration, + }, + }); + } else { + logger.error("Invalid hostname substitution configuration", { + ...logger_context, + context: { + key, + value: config[key], + }, + }); + } + } + + logger.info("Hostname substitution service initialized", { + ...logger_context, + context: { + substitutions: this._configuration, + }, + }); + } + substituteHostname(url: string): string { + const logger_context = { + origin: "HostnameSubstitutionService", + //TODO: add this TRACE_ID + trace_id: "UNDEFINED_TRACE_ID", + }; + + var host: URL; + var strip: boolean = false; + try { + host = new URL(url); + } catch (e) { + try { + host = new URL("http://" + url); + strip = true; + } catch (e) { + this._logger.info("Hostname can not be substituted, invalid URL", { + ...logger_context, + context: { + url, + }, + }); + return url; + } + } + + const [hostname] = host.host.split(":"); + for (const key in this._configuration) { + if (hostname.endsWith(key)) { + const new_hostname = + hostname.substring(0, hostname.length - key.length) + + this._configuration[key]; + + let recombined_hostname = new_hostname; + + this._logger.debug( + `Substituted hostname ${hostname} -> ${recombined_hostname}`, + { + ...logger_context, + context: { + key, + value: this._configuration[key], + hostname: host.hostname, + new_hostname: recombined_hostname, + }, + }, + ); + + const new_url = new URL(strip ? 
"https://" + url : url); + new_url.host = recombined_hostname; + new_url.protocol = "https:"; + var ret; + ret = new_url.toString().substring("https://".length); + + while (ret.endsWith("/") && !url.endsWith("/")) { + ret = ret.substring(0, ret.length - 1); + } + return ret; + } + } + + this._logger.debug(`No substitution for hostname ${host.toString()}`, { + ...logger_context, + context: { + hostname: host.host, + }, + }); + return url; + } +} diff --git a/packages/dweb-api-resolver/src/HostnameSubstitutionService/parseRawConfig.ts b/packages/dweb-api-resolver/src/HostnameSubstitutionService/parseRawConfig.ts new file mode 100644 index 0000000..b13ae0b --- /dev/null +++ b/packages/dweb-api-resolver/src/HostnameSubstitutionService/parseRawConfig.ts @@ -0,0 +1,18 @@ +import { HostnameSubstitutionConfiguration } from "dweb-api-types/dist/config"; + +export function parseRawConfig( + rawConfig: string, +): HostnameSubstitutionConfiguration { + try { + return JSON.parse(Buffer.from(rawConfig, "base64").toString()); + } catch { + // Fallback to plain JSON + } + try { + return JSON.parse(rawConfig); + } catch (e: any) { + throw new Error( + `Invalid hostname substitution configuration: ${e.message}`, + ); + } +} diff --git a/packages/dweb-api-resolver/src/nameservice/EnsService.ts b/packages/dweb-api-resolver/src/nameservice/EnsService.ts new file mode 100644 index 0000000..22ad99a --- /dev/null +++ b/packages/dweb-api-resolver/src/nameservice/EnsService.ts @@ -0,0 +1,142 @@ +import { FallbackProvider, JsonRpcProvider, AbstractProvider } from "ethers"; +import { ILoggerService } from "dweb-api-types/dist/logger"; +import { IRequestContext } from "dweb-api-types/dist/request-context"; +import { INameService } from "dweb-api-types/dist/name-service"; +import { + IConfigurationEthereum, + IConfigurationEthereumFailover, +} from "dweb-api-types/dist/config"; +import { getContentHashFallback } from "./utils.js"; +const getEnsContentHash = async ( + request: IRequestContext, + provider: AbstractProvider, + logger: ILoggerService, + name: string, +): Promise => { + const res = await provider.getResolver(name); + if (!res) { + logger.debug("no resolver", { + ...request, + origin: "getEnsContentHash", + context: { + name: name, + }, + }); + return null; + } + try { + const contentHash = await res.getContentHash(); + return contentHash; + } catch (e: any) { + if (e?.code === "UNSUPPORTED_OPERATION" && e?.info?.data) { + logger.debug("entering fallback", { + ...request, + origin: "getEnsContentHash", + context: { + name: name, + error: e, + }, + }); + return getContentHashFallback( + request, + logger, + e.info.data, + name, + "EnsService", + ); + } else { + throw e; + } + } +}; + +export class EnsService implements INameService { + _configurationService: IConfigurationEthereum & + Partial; + provider: FallbackProvider; + _logger: ILoggerService; + constructor( + configurationService: IConfigurationEthereum & + Partial, + logger: ILoggerService, + ) { + this._configurationService = configurationService; + const ethereumConfig = + this._configurationService.getConfigEthereumBackend(); + const failoverConfigOriginal = + this._configurationService.getConfigEthereumFailover && + this._configurationService.getConfigEthereumFailover(); + const rpc = ethereumConfig.getBackend(); + const failoverConfig = { + ...failoverConfigOriginal, + getProviderStallTimeout: + failoverConfigOriginal?.getProviderStallTimeout || (() => 10000), + getStallTimeout: failoverConfigOriginal?.getStallTimeout || (() => 10000), + 
getQuorum: failoverConfigOriginal?.getQuorum || (() => 1), + getPrimaryFailoverBackend: + failoverConfigOriginal?.getPrimaryFailoverBackend || (() => null), + getSecondaryFailoverBackend: + failoverConfigOriginal?.getSecondaryFailoverBackend || (() => null), + }; + const primary_failover = failoverConfig.getPrimaryFailoverBackend(); + const secondary_failover = failoverConfig.getSecondaryFailoverBackend(); + const quorum = failoverConfig.getQuorum(); + const providers = [ + new JsonRpcProvider(rpc, undefined, { + staticNetwork: true, + }), + ]; + if (primary_failover) { + logger.info("EnsService: adding failover_primary", { + trace_id: "UNKNOWN_TRACE_ID", + origin: "EnsService", + }); + providers.push( + new JsonRpcProvider(primary_failover, undefined, { + staticNetwork: true, + }), + ); + } + if (secondary_failover) { + logger.info("EnsService: adding failover_secondary", { + trace_id: "UNKNOWN_TRACE_ID", + origin: "EnsService", + }); + providers.push( + new JsonRpcProvider(secondary_failover, undefined, { + staticNetwork: true, + }), + ); + } + const providers_as_config: { + provider: JsonRpcProvider; + priority: number; + weight: number; + stallTimeout: number; + }[] = providers.map((provider, index) => { + provider._getConnection().timeout = failoverConfig.getStallTimeout(); + return { + provider, + priority: index, + weight: 1, + stallTimeout: failoverConfig.getProviderStallTimeout(), + }; + }); + + this.provider = new FallbackProvider(providers_as_config, quorum); + this._logger = logger; + } + + async getContentHash( + request: IRequestContext, + name: string, + ): Promise { + const res = await getEnsContentHash( + request, + this.provider, + this._logger, + name, + ); + return res; + } +} diff --git a/packages/dweb-api-resolver/src/nameservice/Web3NameSdkService.ts b/packages/dweb-api-resolver/src/nameservice/Web3NameSdkService.ts new file mode 100644 index 0000000..7fc59f2 --- /dev/null +++ b/packages/dweb-api-resolver/src/nameservice/Web3NameSdkService.ts @@ -0,0 +1,50 @@ +import { createWeb3Name } from "@web3-name-sdk/core"; +import { IRequestContext } from "dweb-api-types/dist/request-context.js"; +import { ILoggerService } from "dweb-api-types/dist/logger.js"; +import { INameService } from "dweb-api-types/dist/name-service.js"; +import { + IConfigurationEthereum, + IConfigurationGnosis, +} from "dweb-api-types/dist/config.js"; +import { getContentHashFallback } from "./utils.js"; + +export class Web3NameSdkService implements INameService { + _configurationService: IConfigurationGnosis & IConfigurationEthereum; + _logger: ILoggerService; + //type exposed by imports + _web3name: ReturnType; + + constructor( + configurationService: IConfigurationGnosis & IConfigurationEthereum, + logger: ILoggerService, + ) { + this._configurationService = configurationService; + this._logger = logger; + this._web3name = createWeb3Name({ + isDev: false, + rpcUrl: this._configurationService + .getConfigEthereumBackend() + .getBackend(), + }); + } + + async getContentHash( + request: IRequestContext, + name: string, + ): Promise { + const res = await this._web3name.getContentHash({ + name, + rpcUrl: this._configurationService.getConfigGnosisBackend().getBackend(), + }); + if (!res) { + return null; + } + return getContentHashFallback( + request, + this._logger, + res, + name, + "Web3NameSdkService", + ); + } +} diff --git a/packages/dweb-api-resolver/src/nameservice/index.ts b/packages/dweb-api-resolver/src/nameservice/index.ts new file mode 100644 index 0000000..d803332 --- /dev/null +++ 
b/packages/dweb-api-resolver/src/nameservice/index.ts @@ -0,0 +1,42 @@ +import { IRequestContext } from "dweb-api-types/dist/request-context.js"; +import { ILoggerService } from "dweb-api-types/dist/logger.js"; +import { + INameService, + INameServiceFactory, +} from "dweb-api-types/dist/name-service.js"; +export type Tag = "IEnsServiceError"; +export type ErrorType = "error"; + +export class NameServiceFactory implements INameServiceFactory { + _logger: ILoggerService; + _ensService: INameService; + _web3NameSdkService: INameService; + + constructor( + logger: ILoggerService, + ensService: INameService, + web3NameSdkService: INameService, + ) { + this._logger = logger; + this._ensService = ensService; + this._web3NameSdkService = web3NameSdkService; + } + + getNameServiceForDomain( + request: IRequestContext, + domain: string, + ): INameService { + if (domain.endsWith(".gno")) { + this._logger.debug("Using Web3NameSdkService for domain " + domain, { + ...request, + origin: "NameServiceFactory", + }); + return this._web3NameSdkService; + } + this._logger.debug("Using EnsService for domain " + domain, { + ...request, + origin: "NameServiceFactory", + }); + return this._ensService; + } +} diff --git a/packages/dweb-api-resolver/src/nameservice/utils.ts b/packages/dweb-api-resolver/src/nameservice/utils.ts new file mode 100644 index 0000000..0853976 --- /dev/null +++ b/packages/dweb-api-resolver/src/nameservice/utils.ts @@ -0,0 +1,45 @@ +import { decode, getCodec } from "@ensdomains/content-hash"; +import { ILoggerService } from "dweb-api-types/dist/logger"; +import { IRequestContext } from "dweb-api-types/dist/request-context"; + +const fixCodecReturnValue = (codec: string | undefined): string | undefined => { + if (codec === "swarm") { + return "bzz"; + } + return codec; +}; + +export const getContentHashFallback = ( + request: IRequestContext, + logger: ILoggerService, + res: string, + name: string, + serviceName: string, +): string | null => { + const codec = fixCodecReturnValue(getCodec(res)); + const content = decode(res); + + console.log("codec", codec); + if (!codec || !content) { + logger.error("unsupported fallback decode operation", { + ...request, + origin: serviceName, + context: { + name, + codec, + content, + }, + }); + return null; + } + const contentHashDecoded = `${codec}://${content}`; + logger.debug("getContentHash", { + ...request, + origin: serviceName, + context: { + name, + contentHash: contentHashDecoded, + }, + }); + return contentHashDecoded; +}; diff --git a/src/services/EnsResolverService/arweave.ts b/packages/dweb-api-resolver/src/resolver/arweave.ts similarity index 59% rename from src/services/EnsResolverService/arweave.ts rename to packages/dweb-api-resolver/src/resolver/arweave.ts index 1941a82..f11028e 100644 --- a/src/services/EnsResolverService/arweave.ts +++ b/packages/dweb-api-resolver/src/resolver/arweave.ts @@ -1,23 +1,16 @@ import { LoggerFactory, WarpFactory } from "warp-contracts"; import { base32 } from "rfc4648"; -import { ILoggerService } from "../LoggerService"; -import { inject, injectable } from "inversify"; -import { DITYPES } from "../../dependencies/types"; -import { IRequestContext } from "../lib"; +import { ILoggerService } from "dweb-api-types/dist/logger.js"; +import { IRequestContext } from "dweb-api-types/dist/request-context.js"; +import { IArweaveResolver } from "dweb-api-types/dist/arweave.js"; LoggerFactory.INST.setOptions({ LogLevel: "none" }); -export interface IArweaveResolver { - resolveArweave: (request: IRequestContext, 
tx_id: string, ens_name: string) => Promise; -}; - -@injectable() export class ArweaveResolver implements IArweaveResolver { - warp = WarpFactory.forMainnet({ inMemory: true, dbLocation: "./warpdb" }); _logger: ILoggerService; - constructor(@inject(DITYPES.LoggerService) logger: ILoggerService) { + constructor(logger: ILoggerService) { this._logger = logger; } @@ -28,22 +21,23 @@ export class ArweaveResolver implements IArweaveResolver { const state = await contract.readState(); return state; } catch (error) { - this._logger.info( - 'invalid arweave tx id', - { - ...request, - origin: 'arweaveContractQuery', - context: { - tx_id: tx_id, - error: error - } - } - ); + this._logger.info("invalid arweave tx id", { + ...request, + origin: "arweaveContractQuery", + context: { + tx_id: tx_id, + error: error, + }, + }); return undefined; } }; - resolveArweave = async (request: IRequestContext, tx_id: string, ens_name: string) => { + resolveArweave = async ( + request: IRequestContext, + tx_id: string, + ens_name: string, + ) => { const state = await this.arweaveContractQuery(request, tx_id); if (!state) { return tx_id; @@ -56,10 +50,6 @@ export class ArweaveResolver implements IArweaveResolver { var match: string | null = null; - //BUG: this uses longest prefix matching on the ens name, which means that it is technically incorrect - // what we should be doing is requiring subdomains to be explicit to the ENS name - // I don't know if there's a funny edge case here - keys.forEach((key: string) => { if (ens_name.startsWith(key)) { if (match == null || match.length < key.length) { @@ -75,16 +65,16 @@ export class ArweaveResolver implements IArweaveResolver { } else { this._logger.error( //`resolveArweave: invalid record ${match} found for ${tx_id} ${ens_name}`, - 'invalid arweave record found', + "invalid arweave record found", { ...request, - origin: 'resolveArweave', + origin: "resolveArweave", context: { match: match, tx_id: tx_id, - ens_name: ens_name - } - } + ens_name: ens_name, + }, + }, ); } } @@ -94,53 +84,50 @@ export class ArweaveResolver implements IArweaveResolver { if (typeof ret === "string") { return ret; } else { - this._logger.error( - 'invalid arweave @ record found', - { - ...request, - origin: 'resolveArweave', - context: { - tx_id: tx_id, - ens_name: ens_name, - records, - } - } - ); + this._logger.error("invalid arweave @ record found", { + ...request, + origin: "resolveArweave", + context: { + tx_id: tx_id, + ens_name: ens_name, + records, + }, + }); } } - this._logger.warn('no arweave @ record found', - { + this._logger.warn("no arweave @ record found", { ...request, - origin: 'resolveArweave', + origin: "resolveArweave", context: { tx_id: tx_id, ens_name: ens_name, records, - } + }, }); return tx_id; }; } -export const arweaveTxIdToArweaveSandboxSubdomainId = async (request: IRequestContext, logger: ILoggerService, tx_id: string) => { +export const arweaveTxIdToArweaveSandboxSubdomainId = async ( + request: IRequestContext, + logger: ILoggerService, + tx_id: string, +) => { try { return base32 .stringify(Buffer.from(tx_id, "base64"), { pad: false }) .toLowerCase(); } catch (e) { - logger.error( - 'invalid arweave tx id', - { - ...request, - origin: 'arweaveTxIdToArweaveSandboxSubdomainId', - context: { - tx_id: tx_id, - error: e - } - } - ); + logger.error("invalid arweave tx id", { + ...request, + origin: "arweaveTxIdToArweaveSandboxSubdomainId", + context: { + tx_id: tx_id, + error: e, + }, + }); return undefined; } }; @@ -151,7 +138,11 @@ export const 
arweaveUrlToSandboxSubdomain = async ( tx_id: string, arweave_gateway: URL, ): Promise => { - const subdomain = await arweaveTxIdToArweaveSandboxSubdomainId(request, logger, tx_id); + const subdomain = await arweaveTxIdToArweaveSandboxSubdomainId( + request, + logger, + tx_id, + ); if (!subdomain) { return arweave_gateway; } @@ -160,4 +151,4 @@ export const arweaveUrlToSandboxSubdomain = async ( url.host = subdomain + "." + url.host; return url; -}; \ No newline at end of file +}; diff --git a/src/services/EnsResolverService/const.ts b/packages/dweb-api-resolver/src/resolver/const.ts similarity index 100% rename from src/services/EnsResolverService/const.ts rename to packages/dweb-api-resolver/src/resolver/const.ts diff --git a/src/services/EnsResolverService/index.ts b/packages/dweb-api-resolver/src/resolver/index.ts similarity index 56% rename from src/services/EnsResolverService/index.ts rename to packages/dweb-api-resolver/src/resolver/index.ts index 750f4b9..593fa36 100644 --- a/src/services/EnsResolverService/index.ts +++ b/packages/dweb-api-resolver/src/resolver/index.ts @@ -1,46 +1,25 @@ import { CID } from "multiformats"; import { bases } from "multiformats/basics"; -import { inject, injectable } from "inversify"; -import { DITYPES } from "../../dependencies/types"; -import { INameService, INameServiceFactory, NameServiceFactory } from "../NameService"; -import { ILoggerService } from "../LoggerService"; -import { ICacheService } from "../CacheService"; -import { peerIdFromString } from '@libp2p/peer-id' +import { INameServiceFactory } from "dweb-api-types/dist/name-service.js"; +import { ICacheService } from "dweb-api-types/dist/cache.js"; +import { peerIdFromString } from "@libp2p/peer-id"; import * as z from "zod"; -import { IArweaveResolver } from "./arweave"; -import { IKuboApiService } from "../KuboApiService"; -import { IRequestContext } from "../lib"; +import { IArweaveResolver } from "dweb-api-types/dist/arweave.js"; +import { IKuboApiService } from "dweb-api-types/dist/kubo-api.js"; +import { IRequestContext } from "dweb-api-types/dist/request-context.js"; +import { ILoggerService } from "dweb-api-types/dist/logger.js"; +import { IRecord, Record } from "dweb-api-types/dist/ens-resolver.js"; -const RECORD_CODEC_TYPE = z.enum(["ipfs-ns", "ipns-ns", "arweave-ns", "swarm"]); - -const Record = z - .union([ - z.object({ - _tag: z.literal("Record"), - codec: RECORD_CODEC_TYPE, - DoHContentIdentifier: z.string(), - }), - z.object({ - _tag: z.literal("ens-socials-redirect"), - ensName: z.string(), - }), - ]) - .nullable(); - -export type Record = z.infer; -export interface ProxyRecord { - XContentLocation: string; - XContentPath: string; -} - -const calculateIpfsIpnsSubdomainRecord = ( +export const calculateIpfsIpnsSubdomainRecord = ( method: "ipfs-ns" | "ipns-ns", peerIdOrCid: string, -): Record => { + ensName: string, +): IRecord => { return { _tag: "Record", codec: method, DoHContentIdentifier: peerIdOrCid, + ensName, }; }; @@ -49,35 +28,35 @@ export async function parseRecord( logger: ILoggerService, content: string, hostname: string, -): Promise { - +): Promise { if (content.startsWith("ipfs://")) { const ipfsval = content.split("ipfs://")[1]; var base32Cid = formatCid(request, logger, ipfsval, hostname); if (!base32Cid) { return null; } - return calculateIpfsIpnsSubdomainRecord("ipfs-ns", base32Cid); + return calculateIpfsIpnsSubdomainRecord("ipfs-ns", base32Cid, hostname); } else if (content.startsWith("ipns://")) { const ipnsval = content.split("ipns://")[1]; var 
base36PeerId = getPeerId(request, logger, ipnsval, hostname); if (!base36PeerId) { - logger.error('not supported', { + logger.error("not supported", { ...request, - origin: 'parseRecord', + origin: "parseRecord", context: { ipnsval: ipnsval, - } + }, }); return null; } - return calculateIpfsIpnsSubdomainRecord("ipns-ns", base36PeerId); + return calculateIpfsIpnsSubdomainRecord("ipns-ns", base36PeerId, hostname); } else if (content.startsWith("arweave://")) { const cleanPath = content.split("arweave://")[1]; return { _tag: "Record", codec: "arweave-ns", DoHContentIdentifier: cleanPath, + ensName: hostname, }; } else if (content.startsWith("bzz://")) { const cleanPath = content.split("bzz://")[1]; @@ -85,6 +64,7 @@ export async function parseRecord( _tag: "Record", codec: "swarm", DoHContentIdentifier: cleanPath, + ensName: hostname, }; } else { return null; @@ -93,7 +73,9 @@ export async function parseRecord( //this is sort of redundant //when ipns -> ipfs resolution is performed, we get the internal path representation instead of protocol representation //this performs the conversion /ipns/... -> ipns://... so that ipns://... can be properly parsed by parseRecord -function ipfsInternalPathRepresentationToCanonicalProtocolRepresentation(content: string) { +function ipfsInternalPathRepresentationToCanonicalProtocolRepresentation( + content: string, +) { let contentIpfsInternalsSanitized = content; if (content.startsWith("/ipns/")) { contentIpfsInternalsSanitized = content.replace("/ipns/", "ipns://"); @@ -103,71 +85,78 @@ function ipfsInternalPathRepresentationToCanonicalProtocolRepresentation(content return contentIpfsInternalsSanitized; } -export function getPeerId(request: IRequestContext, logger: ILoggerService, value: string, hostname: string) { +export function getPeerId( + request: IRequestContext, + logger: ILoggerService, + value: string, + hostname: string, +) { var peerId; try { peerId = peerIdFromString(value).toCID().toString(); } catch (err) { - if (err instanceof RangeError && err.message.startsWith('Unable to decode multibase string')) { + if ( + err instanceof RangeError && + err.message.startsWith("Unable to decode multibase string") + ) { logger.info( - 'Unable to decode multibase string, probably using another ENS record for hostname', + "Unable to decode multibase string, probably using another ENS record for hostname", { ...request, - origin: 'getPeerId', + origin: "getPeerId", context: { value: value, hostname: hostname, - error: err - } - } + error: err, + }, + }, ); return value; - } - else if (err instanceof Error && err.message.startsWith('Non-base36 character')) { - logger.info( - `Non-base36 character, probably using DNSLink`, - { - ...request, - origin: 'getPeerId', - context: { - value: value, - hostname: hostname, - } - }); - return value; - } - else { - logger.error( - 'Error converting IPNS PeerID', - { - ...request, - origin: 'getPeerId', - context: { - value: value, - hostname: hostname, - error: err - } - } - ); - return null; - } - } - try { - const peerIdCid = formatCid(request, logger, peerId.toString(), hostname, "peerId"); - return peerIdCid; - } catch (err) { - logger.error( - 'Error formatting IPNS PeerID', - { + } else if ( + err instanceof Error && + err.message.startsWith("Non-base36 character") + ) { + logger.info(`Non-base36 character, probably using DNSLink`, { ...request, - origin: 'getPeerId', + origin: "getPeerId", context: { value: value, hostname: hostname, - error: err - } - } + }, + }); + return value; + } else { + logger.error("Error 
converting IPNS PeerID", { + ...request, + origin: "getPeerId", + context: { + value: value, + hostname: hostname, + error: err, + }, + }); + return null; + } + } + try { + const peerIdCid = formatCid( + request, + logger, + peerId.toString(), + hostname, + "peerId", ); + return peerIdCid; + } catch (err) { + logger.error("Error formatting IPNS PeerID", { + ...request, + origin: "getPeerId", + context: { + value: value, + hostname: hostname, + error: err, + }, + }); return null; } } @@ -184,7 +173,7 @@ function formatCid( try { const prefix = value.substring(0, 1); const base = Object.keys(bases) - .map((key: baseKeys) => bases[key]) + .map((key) => bases[key as baseKeys]) .filter((x) => { return x.prefix.toString() === prefix; })[0]; @@ -203,18 +192,15 @@ function formatCid( return cid; } } catch (err) { - logger.error( - 'Error converting IPFS multihash', - { - ...request, - origin: 'formatCid', - context: { - value: value, - hostname: hostname, - error: err - } - } - ); + logger.error("Error converting IPFS multihash", { + ...request, + origin: "formatCid", + context: { + value: value, + hostname: hostname, + error: err, + }, + }); return null; } } @@ -229,23 +215,25 @@ export type IEnsResolverServiceResolveEnsRet = z.infer< >; export interface IEnsResolverService { - resolveEns(request: IRequestContext, hostname: string): Promise; + resolveEns( + request: IRequestContext, + hostname: string, + ): Promise; } -@injectable() export class EnsResolverService implements IEnsResolverService { private _logger: ILoggerService; private _cacheService: ICacheService; private _arweaveResolver: IArweaveResolver; - private _kuboApiService: IKuboApiService; + private _kuboApiService: IKuboApiService | null; private _nameServiceFactory: INameServiceFactory; constructor( - @inject(DITYPES.LoggerService) logger: ILoggerService, - @inject(DITYPES.CacheService) cacheService: ICacheService, - @inject(DITYPES.ArweaveResolver) arweaveResolver: IArweaveResolver, - @inject(DITYPES.KuboApiService) kuboApiService: IKuboApiService, - @inject(DITYPES.NameServiceFactory) nameServiceFactory: INameServiceFactory + logger: ILoggerService, + cacheService: ICacheService, + arweaveResolver: IArweaveResolver, + kuboApiService: IKuboApiService | null, + nameServiceFactory: INameServiceFactory, ) { this._logger = logger; this._cacheService = cacheService; @@ -259,23 +247,20 @@ export class EnsResolverService implements IEnsResolverService { hostname: string, ): Promise { try { - const nameService = this._nameServiceFactory.getNameServiceForDomain(request, hostname); + const nameService = this._nameServiceFactory.getNameServiceForDomain( + request, + hostname, + ); let contentHash = await nameService.getContentHash(request, hostname); - this._logger.debug( - 'contenthash', - { - ...request, - origin: 'resolveEns', - context: { - contentHash: contentHash - } - } - ) - if (contentHash.error) { - throw contentHash.reason; - } + this._logger.debug("contenthash", { + ...request, + origin: "resolveEns", + context: { + contentHash: contentHash, + }, + }); - let res = contentHash.result; + let res = contentHash; if (!res) { return { @@ -289,36 +274,50 @@ export class EnsResolverService implements IEnsResolverService { if (res.startsWith("arweave://")) { const ar_id = res.split("arweave://")[1]; - this._logger.debug('ar_id', { + this._logger.debug("ar_id", { ...request, - origin: 'resolveEns', + origin: "resolveEns", context: { - ar_id: ar_id - } + ar_id: ar_id, + }, }); - res = "arweave://" + (await 
this._arweaveResolver.resolveArweave(request, ar_id, hostname)); - } else if(res.startsWith("ipns://")) { - this._logger.debug('resolving ipns', { + res = + "arweave://" + + (await this._arweaveResolver.resolveArweave( + request, + ar_id, + hostname, + )); + } else if (res.startsWith("ipns://")) { + this._logger.debug("resolving ipns", { ...request, - origin: 'resolveEns', + origin: "resolveEns", context: { - res: res - } + res: res, + }, }); - let ret = (await this._kuboApiService.resolveIpnsName(request, res)); + let ret = await this._kuboApiService?.resolveIpnsName(request, res); if (ret) { - res = ipfsInternalPathRepresentationToCanonicalProtocolRepresentation(ret); + res = + ipfsInternalPathRepresentationToCanonicalProtocolRepresentation( + ret, + ); } } - const r: Record = await parseRecord(request, this._logger, res, hostname); - this._logger.debug('record', { + const r: IRecord = await parseRecord( + request, + this._logger, + res, + hostname, + ); + this._logger.debug("record", { ...request, - origin: 'resolveEns', + origin: "resolveEns", context: { - record: r - } + record: r, + }, }); const retval: IEnsResolverServiceResolveEnsRet = { record: r, @@ -327,13 +326,13 @@ export class EnsResolverService implements IEnsResolverService { return retval; } catch (err) { - this._logger.error('resolution failure', { + this._logger.error("resolution failure", { ...request, - origin: 'resolveEns', + origin: "resolveEns", context: { hostname: hostname, - error: err - } + error: err, + }, }); throw err; } diff --git a/packages/dweb-api-resolver/src/resolver/utils.ts b/packages/dweb-api-resolver/src/resolver/utils.ts new file mode 100644 index 0000000..69d14ac --- /dev/null +++ b/packages/dweb-api-resolver/src/resolver/utils.ts @@ -0,0 +1,158 @@ +import { recordNamespaceToUrlHandlerMap } from "./const.js"; +import { ILoggerService } from "dweb-api-types/dist/logger.js"; +import { arweaveUrlToSandboxSubdomain } from "./arweave.js"; +import { IRequestContext } from "dweb-api-types/dist/request-context.js"; +import { IRecord } from "dweb-api-types/dist/ens-resolver.js"; +import { ProxyRecord } from "dweb-api-types/dist/dweb-api-resolver.js"; +import { + IConfigurationArweave, + IConfigurationEnsSocials, + IConfigurationIpfs, + IConfigurationSwarm, +} from "dweb-api-types/dist/config.js"; + +export const ensureTrailingSlash = (path: string) => { + if (path.endsWith("/")) { + return path; + } else { + return path + "/"; + } +}; + +export const trimExtraneousTrailingSlashes = (path: string) => { + return path.replace(/\/+$/, "/"); +}; + +export const trimExtraneousLeadingSlashes = (path: string) => { + return path.replace(/^\/+/, "/"); +}; + +export interface ProxyRecordUnableToRedirect { + _tag: "ProxyRecordUnableToRedirect"; + record: IRecord; +} + +export const recordToProxyRecord = async ( + request: IRequestContext, + config: IConfigurationEnsSocials & + IConfigurationIpfs & + IConfigurationArweave & + IConfigurationSwarm, + logger: ILoggerService, + record: NonNullable, +): Promise< + ((IRecord & ProxyRecord) | ProxyRecordUnableToRedirect) & { + overrideCodecHeader?: string; + } +> => { + const socialsEndpointConfig = config.getConfigEnsSocialsEndpoint(); + const ipfsConfig = config.getConfigIpfsBackend(); + const arweaveConfig = config.getConfigArweaveBackend(); + const swarmConfig = config.getConfigSwarmBackend(); + var path = "/"; + var overrideCodecHeader: string | undefined = undefined; + if (record._tag === "ens-socials-redirect") { + if (!socialsEndpointConfig.getEnsSocialsEndpoint) { + 
return { + _tag: "ProxyRecordUnableToRedirect", + record: record, + }; + } + const redirectUrl = new URL( + socialsEndpointConfig.getEnsSocialsEndpoint(record.ensName), + ); + return { + ...record, + XContentLocation: redirectUrl.origin, + XContentPath: ensureTrailingSlash( + redirectUrl.pathname + redirectUrl.search, + ), + }; + } else if (record._tag === "Record") { + if (record.codec === "ipfs-ns" || record.codec === "ipns-ns") { + const url = new URL(ipfsConfig.getBackend()); + var path = "/"; + const urlSafeIpfsOrIpns = recordNamespaceToUrlHandlerMap[record.codec]; + if (ipfsConfig.getSubdomainSupport()) { + let DoHContentIdentifier = record.DoHContentIdentifier; + if (record.codec === "ipns-ns") { + DoHContentIdentifier = + normalizeUrlFragmentForIpfsSubdomainGateway(DoHContentIdentifier); + } + /* + if the DoHContentIdentifier is less than 64 characters, it can not be encoded as a DNS fragment + we must encode this in the proxy logic because the IPFS gateway will perform a DoH query + the DoH query must not resolve to ensname-eth.ipns.gateway because that will cause a loop + */ + const encodedEnsName = normalizeUrlFragmentForIpfsSubdomainGateway( + record.ensName, + ); + if (record.DoHContentIdentifier.length < 64) { + url.host = `${DoHContentIdentifier}.${urlSafeIpfsOrIpns}.${url.host}`; + } else if (encodedEnsName.length < 64) { + url.host = `${encodedEnsName}.ipns.${url.host}`; + overrideCodecHeader = "ipns-ns"; + } else { + logger.error("IPNS name can not be encoded as a DNS fragment", { + ...request, + origin: "recordToProxyRecord", + context: { + record, + }, + }); + throw new Error("IPNS name can not be encoded as a DNS fragment"); + } + } else { + path = `/${urlSafeIpfsOrIpns}/${record.DoHContentIdentifier}/`; + } + return { + ...record, + XContentLocation: url.toString(), + XContentPath: path, + overrideCodecHeader, + }; + } else if (record.codec === "arweave-ns") { + const backend = new URL(arweaveConfig.getBackend()); + return { + ...record, + XContentLocation: ( + await arweaveUrlToSandboxSubdomain( + request, + logger, + record.DoHContentIdentifier, + backend, + ) + ).toString(), + XContentPath: ensureTrailingSlash("/" + record.DoHContentIdentifier), + }; + } else if (record.codec === "swarm") { + return { + ...record, + XContentLocation: swarmConfig.getBackend(), + XContentPath: ensureTrailingSlash( + "/bzz/" + record.DoHContentIdentifier, + ), + }; + } + //record.codec should be never due to exhaustivity check + return record.codec; + } else { + //record should be never due to exhaustivity check + return record; + } +}; +export function normalizeUrlFragmentForIpfsSubdomainGateway( + DoHContentIdentifier: string, +): string { + return [...DoHContentIdentifier] + .map((c) => { + if (c == ".") { + return "-"; + } else if (c == "-") { + return "--"; + } else { + return c; + } + }) + .join(""); +} diff --git a/packages/dweb-api-resolver/tsconfig.json b/packages/dweb-api-resolver/tsconfig.json new file mode 100644 index 0000000..398a837 --- /dev/null +++ b/packages/dweb-api-resolver/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "../../tsconfig.base.json", + "include": [ + "./src/**/*.ts" + ], + "compilerOptions": { + "rootDir": "./src", + "outDir": "./dist", + "baseUrl": "./src", + "moduleResolution": "bundler", + "module": "ES2022", + "skipLibCheck": true + }, +} \ No newline at end of file diff --git a/.mocharc.json b/packages/dweb-api-server/.mocharc.json similarity index 79% rename from .mocharc.json rename to packages/dweb-api-server/.mocharc.json index 
25011ac..af0704a 100644 --- a/.mocharc.json +++ b/packages/dweb-api-server/.mocharc.json @@ -1,5 +1,5 @@ { - "extensions": ["ts"], + "extensions": ["ts", "js"], "spec": ["src/**/*.spec.*"], "require": ["ts-node/register"], "loader": "ts-node/esm", diff --git a/packages/dweb-api-server/package.json b/packages/dweb-api-server/package.json new file mode 100644 index 0000000..8c8ea89 --- /dev/null +++ b/packages/dweb-api-server/package.json @@ -0,0 +1,50 @@ +{ + "name": "dweb-api-server", + "version": "1.0.0", + "description": "Proxy middleware for ENS and other on chain naming services", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "dev": "nodemon -e js,ts --watch src --exec \"npx tsx src/index.ts\"", + "fmt": "prettier src -w", + "test": "mocha --exit", + "test:coverage": "nyc mocha --exit", + "build": "tsc", + "run-build": "node --import=extensionless/register dist/index.js" + }, + "author": "eth.limo team", + "license": "MIT", + "dependencies": { + "@ensdomains/content-hash": "^3.0.0-beta.5", + "@libp2p/peer-id": "^4.0.9", + "@web3-name-sdk/core": "^0.2.0", + "cors": "^2.8.5", + "dns-packet": "^5.6.0", + "dweb-api-cache": "file:../dweb-api-cache", + "dweb-api-logger": "file:../dweb-api-logger", + "dweb-api-resolver": "file:../dweb-api-resolver", + "ethers": "^6.13.1", + "express": "^4.18.2", + "extensionless": "^1.9.6", + "ioredis": "^5.3.2", + "multiformats": "^13.0.0", + "node-cache": "^5.1.2", + "punycode": "^2.3.0", + "superagent": "10.2.0", + "typeserializer": "^0.2.5", + "warp-contracts": "^1.4.25" + }, + "devDependencies": { + "@types/cors": "^2.8.13", + "@types/dns-packet": "^5.2.4", + "@types/express": "^4.17.17", + "@types/http-proxy": "^1.17.11", + "@types/punycode": "^2.1.3", + "@types/superagent": "^8.1.1", + "@types/url-regex-safe": "^1.0.0", + "ioredis-mock": "^8.9.0", + "node-mocks-http": "^1.14.1", + "url-regex-safe": "^4.0.0" + }, + "type": "module" +} diff --git a/packages/dweb-api-server/src/configuration/index.ts b/packages/dweb-api-server/src/configuration/index.ts new file mode 100644 index 0000000..6aa952c --- /dev/null +++ b/packages/dweb-api-server/src/configuration/index.ts @@ -0,0 +1,588 @@ +import { + IAskEndpointConfig, + IConfigHostnameSubstitution, + IConfigurationArweave, + IConfigurationEnsSocials, + IConfigurationEthereum, + IConfigurationEthereumFailover, + IConfigurationGnosis, + ICacheConfig, + IConfigurationIpfs, + IConfigurationServerAsk, + IConfigurationServerDnsquery, + IConfigurationServerRouter, + IDomainQueryConfig, + IRedisConfig, + IConfigurationSwarm, + IConfigurationLogger, + IConfigurationKubo, +} from "dweb-api-types/dist/config"; +import { parseRawConfig } from "dweb-api-resolver/dist/HostnameSubstitutionService/parseRawConfig"; +export const VALID_ENS_TLDS = ["eth", "gno", "art"]; + +const configuration = { + // Ethereum JSON RPC endpoint + ethereum: { + rpc: process.env.ETH_RPC_ENDPOINT || "http://192.168.1.7:8845", + failover_primary: process.env.ETH_RPC_ENDPOINT_FAILOVER_PRIMARY || null, + failover_secondary: process.env.ETH_RPC_ENDPOINT_FAILOVER_SECONDARY || null, + provider_stall_timeout_ms: parseInt( + process.env.ETH_PROVIDER_STALL_TIMEOUT_MS || "200", + ), //see fallbackProviderConfig.stallTimeout + provider_timeout_ms: parseInt( + process.env.ETH_PROVIDER_TIMEOUT_MS || "7000", + ), //see provider._getConnection().timeout + quorum: parseInt(process.env.ETH_PROVIDER_QUORUM || "1"), + }, + gnosis: { + rpc: process.env.GNO_RPC_ENDPOINT || "https://rpc.gnosischain.com", + }, + // Storage backends + ipfs: 
{ + backend: process.env.IPFS_TARGET || "http://localhost:8080", + auth: process.env.IPFS_AUTH_KEY || null, + //if true, proxies {cid}.{ipfs/ipns}.IPFS_TARGET + subdomainSupport: + process.env.IPFS_SUBDOMAIN_SUPPORT === "true" ? true : false, + //ms before we give up and just return an ipns record + kubo_timeout_ms: parseInt(process.env.IPFS_KUBO_TIMEOUT_MS || "2500"), + //this has no default because we assume this isn't available + kubo_api_url: + (process.env.IPFS_KUBO_API_URL && + new URL(process.env.IPFS_KUBO_API_URL)) || + undefined, + }, + arweave: { + backend: process.env.ARWEAVE_TARGET || "https://arweave.net", + }, + swarm: { + backend: process.env.SWARM_TARGET || "https://api.gateway.ethswarm.org", + }, + redis: { + url: process.env.REDIS_URL || "redis://127.0.0.1:6379", + }, + cache: { + ttl: parseInt(process.env.CACHE_TTL || "300"), + }, + // Proxy + router: { + listen: process.env.LISTEN_PORT || 8888, + origin: "LIMO Proxy", + hostnameSubstitutionConfig: + process.env.LIMO_HOSTNAME_SUBSTITUTION_CONFIG || + JSON.stringify({ + "eth.limo": "eth", + "eth.local": "eth", + "gno.limo": "gno", + "gno.local": "gno" + }), + }, + // Server ask endpoint + ask: { + listen: process.env.ASK_LISTEN_PORT || 9090, + enabled: process.env.ASK_ENABLED || "false", + rate: { + limit: Number(process.env.ASK_RATE_LIMIT ?? 10), + //configuration.ask.rate.period: input in minutes, actual value in seconds + period: Number(process.env.ASK_RATE_PERIOD ?? 15) * 60, + enabled: false, //set via limit = 0 + }, + //this is applied before the suffix transformation + max_label_limit: Number(process.env.ASK_MAX_LABEL_LIMIT ?? 10), + }, + //dns-query isolated endpoint (DOH) + dnsquery: { + listen: process.env.DNSQUERY_LISTEN_PORT || 11000, + enabled: process.env.DNSQUERY_ENABLED === "false" ? false : true, + }, + tests: { + hostname: "vitalik.eth", + }, + ens: { + socialsEndpoint: (ens: string) => { + return `https://landing.nimi.page${ens ? "/?ens=" + encodeURI(ens) : ""}`; + }, + socialsEndpointEnabled: + process.env.ENS_SOCIALS_ENDPOINT_ENABLED === "true" ? true : false, + }, + domainsapi: { + ttl: 60, + endpoint: process.env.DOMAINSAPI_ENDPOINT, + max_hops: 5, //e.g. 
asdf.limo -> whatever -> whatever.eth + }, + logging: { + level: process.env.LOG_LEVEL || "info", + }, +}; +configuration.ask.rate.enabled = configuration.ask.rate.limit > 0; + +//throw early if the hostname substitution configuration is invalid +{ + parseRawConfig(configuration.router.hostnameSubstitutionConfig); +} + +export class TestConfigurationService implements ServerConfiguration { + private configuration: typeof configuration; + + constructor() { + this.configuration = JSON.parse(JSON.stringify(configuration)); + this.configuration.ethereum.rpc = "http://localhost:69420"; //ethers is shimmed + this.configuration.ethereum.failover_primary = null; + this.configuration.ethereum.failover_secondary = null; + this.configuration.ethereum.quorum = 1; + this.configuration.ethereum.provider_stall_timeout_ms = 200; + this.configuration.ipfs.backend = "https://ipfs"; //ipfs is never actually queried + this.configuration.ipfs.auth = null; + this.configuration.ipfs.subdomainSupport = true; + this.configuration.redis.url = "redis://redis"; //redis is shimmed + this.configuration.ask.enabled = "false"; + this.configuration.dnsquery.enabled = false; + this.configuration.cache.ttl = 69; + this.configuration.logging.level = "debug"; + this.configuration.swarm.backend = "https://swarm"; //swarm is never actually queried + this.configuration.arweave.backend = "https://arweave"; //arweave is never actually queried + this.configuration.ens.socialsEndpoint = (ens: string) => { + return `https://socials.com?name=${ens}`; + }; + this.configuration.domainsapi.endpoint = "https://domainsapi"; //this needs to be set otherwise it will short circuit to not blacklisted + this.configuration.ask.rate.enabled = false; + //the rate limiter being set to 2 ensures that any shared state between test cases causes a test failure explosion + //this is a good thing, as it means that debugging a bug in the test harness is easier + //the rate limiter is a good smell test for accidental shared state in the test harness because it's a state machine and there are definitely at least 2 cases that can hit it + //a bug in the test harness was discovered because there were tests that were erroring out due to rate limiting being triggered erroneously + //the problem was that beforeEach and afterEach weren't being called because mocha's 'it' wasn't getting a correct binding to the Mocha.Suite + //mocha relies on stateful access of Mocha.Suite (the thisvar in a 'describe' function) to do something similar to what our test harness does on top of Mocha + //for more information, look up why mocha doesn't support arrow functions and requires using regular `function (params) {}` blocks + this.configuration.ask.rate.limit = 2; + this.configuration.ask.rate.period = 30; + //we choose not to test with this because the default behavior for the kubo service is to die quickly and revert to the regular behavior where kubo is absent + this.configuration.ipfs.kubo_api_url = undefined; + } + + set(callback: (configuration: typeof this.configuration) => void) { + callback(this.configuration); + } + + getServerConfiguration() { + return configurationToServerConfiguration(this.configuration); + } + + getConfigAskEndpoint = () => { + return this.getServerConfiguration().getConfigAskEndpoint(); + }; + + getHostnameSubstitutionConfig = () => { + return this.getServerConfiguration().getHostnameSubstitutionConfig(); + }; + + getConfigArweaveBackend = () => { + return this.getServerConfiguration().getConfigArweaveBackend(); + }; + + 
getConfigEnsSocialsEndpoint = () => { + return this.getServerConfiguration().getConfigEnsSocialsEndpoint(); + }; + + getConfigEthereumBackend = () => { + return this.getServerConfiguration().getConfigEthereumBackend(); + }; + + getConfigEthereumFailover = () => { + return this.getServerConfiguration().getConfigEthereumFailover(); + }; + + getConfigGnosisBackend = () => { + return this.getServerConfiguration().getConfigGnosisBackend(); + }; + + getCacheConfig = () => { + return this.getServerConfiguration().getCacheConfig(); + }; + + getDomainQueryConfig = () => { + return this.getServerConfiguration().getDomainQueryConfig(); + }; + + getRedisConfig = () => { + return this.getServerConfiguration().getRedisConfig(); + }; + + getRouterConfig = () => { + return this.getServerConfiguration().getRouterConfig(); + }; + + getConfigSwarmBackend = () => { + return this.getServerConfiguration().getConfigSwarmBackend(); + }; + + getLoggerConfig = () => { + return this.getServerConfiguration().getLoggerConfig(); + }; + + getConfigIpfsBackend = () => { + return this.getServerConfiguration().getConfigIpfsBackend(); + }; + + getAskRouterConfig = () => { + return this.getServerConfiguration().getAskRouterConfig(); + }; + + getDnsqueryRouterConfig = () => { + return this.getServerConfiguration().getDnsqueryRouterConfig(); + }; + + getKuboConfiguration = () => { + return this.getServerConfiguration().getKuboConfiguration(); + }; +} + +/** + * + * NOTE: The following functions are used to convert the configuration object to the respective configuration interfaces + * the parameter config is explicitly destructured to ensure that refactoring at either end of the configuration pipeline will cause specific errors + * changes made to these interfaces must be purposeful + * + * the configuration object is destructured into getters for the respective domain and then recombined + * config -> IXconfig & IYconfig & IZConfig -> ServerConfig + * + */ + +export const configurationToIAskEndpointConfig = (config: { + ask: { + rate: { + limit: number; + period: number; + enabled: boolean; + }; + max_label_limit: number; + }; +}): IAskEndpointConfig => { + return { + getConfigAskEndpoint: () => { + return { + getRateLimit: () => config.ask.rate.limit, + getRatePeriod: () => config.ask.rate.period, + getRateEnabled: () => config.ask.rate.enabled, + getMaxLabelLimit: () => config.ask.max_label_limit, + }; + }, + }; +}; + +export const configurationToIConfigHostnameSubstitution = (config: { + router: { + hostnameSubstitutionConfig: string; + }; +}): IConfigHostnameSubstitution => { + return { + getHostnameSubstitutionConfig: () => { + return parseRawConfig(config.router.hostnameSubstitutionConfig); + }, + }; +}; + +export const configurationToIConfigurationArweave = (config: { + arweave: { + backend: string; + }; +}): IConfigurationArweave => { + return { + getConfigArweaveBackend: () => { + return { + getBackend: () => config.arweave.backend, + }; + }, + }; +}; + +export const configurationToIConfigurationEnsSocials = (config: { + ens: { + socialsEndpoint: (ens: string) => string; + socialsEndpointEnabled: boolean; + }; +}): IConfigurationEnsSocials => { + return { + getConfigEnsSocialsEndpoint: () => { + return { + getEnsSocialsEndpoint: + (config.ens.socialsEndpointEnabled && config.ens.socialsEndpoint) || + null, + }; + }, + }; +}; + +export const configurationToIConfigurationEthereum = (config: { + ethereum: { + rpc: string; + }; +}): IConfigurationEthereum => { + return { + getConfigEthereumBackend: () => { + return { + 
getBackend: () => config.ethereum.rpc, + }; + }, + }; +}; + +export const configurationToIConfigurationEthereumFailover = (config: { + ethereum: { + provider_stall_timeout_ms: number; + provider_timeout_ms: number; + quorum: number; + failover_primary: string | null; + failover_secondary: string | null; + }; +}): IConfigurationEthereumFailover => { + return { + getConfigEthereumFailover: () => { + return { + getStallTimeout: () => config.ethereum.provider_stall_timeout_ms, + getProviderStallTimeout: () => config.ethereum.provider_timeout_ms, + getQuorum: () => config.ethereum.quorum, + getPrimaryFailoverBackend: () => config.ethereum.failover_primary, + getSecondaryFailoverBackend: () => config.ethereum.failover_secondary, + }; + }, + }; +}; + +export const configurationToIConfigurationGnosis = (config: { + gnosis: { + rpc: string; + }; +}): IConfigurationGnosis => { + return { + getConfigGnosisBackend: () => { + return { + getBackend: () => config.gnosis.rpc, + }; + }, + }; +}; + +export const configurationToICacheConfig = (config: { + cache: { + ttl: number; + }; +}): ICacheConfig => { + return { + getCacheConfig: () => { + return { + getTtl: () => config.cache.ttl, + }; + }, + }; +}; + +export const configurationToIDomainQueryConfig = (config: { + domainsapi: { + endpoint: string | undefined; + max_hops: number; + }; +}): IDomainQueryConfig => { + const endpoint = config.domainsapi.endpoint; + return { + getDomainQueryConfig: () => + (endpoint && { + getDomainsApiEndpoint: () => endpoint, + getMaxHops: () => config.domainsapi.max_hops, + }) || + null, + }; +}; + +export const configurationToIRedisConfig = (config: { + redis: { + url: string; + }; +}): IRedisConfig => { + return { + getRedisConfig: () => { + return { + getUrl: () => config.redis.url, + }; + }, + }; +}; + +export const configurationToIConfigurationServerAsk = (config: { + ask: { + listen: string | number; + enabled: string; + }; +}): IConfigurationServerAsk => { + return { + getAskRouterConfig: () => { + return { + getAskRouterListenPort: () => config.ask.listen.toString(), + getAskRouterEnabled: () => config.ask.enabled === "true", + }; + }, + }; +}; + +export const configurationToIConfigurationServerDnsquery = (config: { + dnsquery: { + listen: string | number; + enabled: boolean; + }; +}): IConfigurationServerDnsquery => { + return { + getDnsqueryRouterConfig: () => { + return { + getDnsqueryRouterListenPort: () => config.dnsquery.listen.toString(), + getDnsqueryRouterEnabled: () => config.dnsquery.enabled, + }; + }, + }; +}; + +export const configurationToIConfigurationServerRouter = (config: { + router: { + listen: string | number; + origin: string; + }; +}): IConfigurationServerRouter => { + return { + getRouterConfig: () => { + return { + getRouterListenPort: () => config.router.listen.toString(), + getRouterOrigin: () => config.router.origin, + }; + }, + }; +}; + +export const configurationToIConfigurationSwarm = (config: { + swarm: { + backend: string; + }; +}): IConfigurationSwarm => { + return { + getConfigSwarmBackend: () => { + return { + getBackend: () => config.swarm.backend, + }; + }, + }; +}; + +export const configurationToIConfigurationLogger = (config: { + logging: { + level: string; + }; +}): IConfigurationLogger => { + var level = config.logging.level; + + if ( + level != "debug" && + level != "info" && + level != "warn" && + level != "error" + ) { + console.warn( + JSON.stringify({ + message: "Invalid log level, defaulting to info", + level: level, + }), + ); + level = "info"; + } + + return { + 
getLoggerConfig: () => { + return { + getLevel: () => + config.logging.level as "debug" | "info" | "warn" | "error", + }; + }, + }; +}; + +export const configurationToIConfigurationIpfs = (config: { + ipfs: { + backend: string; + subdomainSupport: boolean; + }; +}): IConfigurationIpfs => { + return { + getConfigIpfsBackend: () => { + return { + getBackend: () => config.ipfs.backend, + getSubdomainSupport: () => config.ipfs.subdomainSupport, + }; + }, + }; +}; + +export const configurationToIConfigurationKubo = (config: { + ipfs: { + kubo_timeout_ms: number; + kubo_api_url: URL | undefined; + auth: string | null; + }; +}): IConfigurationKubo => { + return { + getKuboConfiguration: () => { + return { + getKuboTimeoutMs: () => config.ipfs.kubo_timeout_ms as number | null, + getKuboApiUrl: () => config.ipfs.kubo_api_url || null, + getKuboAuth: () => config.ipfs.auth || null, + }; + }, + }; +}; + +export type ServerConfiguration = IConfigurationServerRouter & + IConfigurationServerDnsquery & + IConfigurationServerAsk & + IConfigurationEnsSocials & + IConfigurationIpfs & + IConfigurationArweave & + IConfigurationSwarm & + IAskEndpointConfig & + IConfigHostnameSubstitution & + IConfigurationEthereum & + IConfigurationEthereumFailover & + IConfigurationGnosis & + ICacheConfig & + IConfigurationServerDnsquery & + IDomainQueryConfig & + IRedisConfig & + IConfigurationLogger & + IConfigurationKubo; + +export const configurationToServerConfiguration = ( + config: typeof configuration, +): ServerConfiguration & { + _innerConfigurationObject: typeof configuration; +} => { + return { + ...configurationToIAskEndpointConfig(config), + ...configurationToIConfigHostnameSubstitution(config), + ...configurationToIConfigurationArweave(config), + ...configurationToIConfigurationEnsSocials(config), + ...configurationToIConfigurationEthereum(config), + ...configurationToIConfigurationEthereumFailover(config), + ...configurationToIConfigurationGnosis(config), + ...configurationToICacheConfig(config), + ...configurationToIDomainQueryConfig(config), + ...configurationToIRedisConfig(config), + ...configurationToIConfigurationServerAsk(config), + ...configurationToIConfigurationServerDnsquery(config), + ...configurationToIConfigurationServerRouter(config), + ...configurationToIConfigurationSwarm(config), + ...configurationToIConfigurationLogger(config), + ...configurationToIConfigurationIpfs(config), + ...configurationToIConfigurationKubo(config), + _innerConfigurationObject: config, + }; +}; + +export const getDefaultServerConfiguration = (): ServerConfiguration => { + return configurationToServerConfiguration( + JSON.parse(JSON.stringify(configuration)), + ); +}; diff --git a/packages/dweb-api-server/src/dependencies/BindingsManager.ts b/packages/dweb-api-server/src/dependencies/BindingsManager.ts new file mode 100644 index 0000000..80e0aca --- /dev/null +++ b/packages/dweb-api-server/src/dependencies/BindingsManager.ts @@ -0,0 +1,28 @@ +export enum EnvironmentConfiguration { + Production = "Production", + Development = "Development", +} + +export type BindingEnvironmentConfig = { + [K in EnvironmentConfiguration]: (env: EnvironmentConfiguration) => T; +}; + +export class EnvironmentBinding { + private bindings: BindingEnvironmentConfig; + + private cache: Map = new Map(); + + constructor(bindings: BindingEnvironmentConfig) { + this.bindings = bindings; + } + + public getBinding(env: EnvironmentConfiguration): T { + if (this.cache.has(env)) { + return this.cache.get(env) as T; + } else { + const binding = 
this.bindings[env](env); + this.cache.set(env, binding); + return binding; + } + } +} diff --git a/packages/dweb-api-server/src/dependencies/services.ts b/packages/dweb-api-server/src/dependencies/services.ts new file mode 100644 index 0000000..f4fb0bb --- /dev/null +++ b/packages/dweb-api-server/src/dependencies/services.ts @@ -0,0 +1,258 @@ +import { + ServerConfiguration, + TestConfigurationService, + getDefaultServerConfiguration, +} from "../configuration"; +import { IDnsQuery, DnsQuery } from "../dnsquery"; +import { + IDomainQueryService, + DomainQueryService, + IDomainQuerySuperagentService, + DomainQuerySuperagentService, + TestDomainQuerySuperagentService, +} from "../services/DomainsQueryService"; +import { TestResolverService } from "../test/TestResolverService"; +import { + DomainRateLimitService, + IDomainRateLimitService, +} from "../services/DomainRateLimit"; +import { KuboApiService } from "../services/KuboApiService"; +import { + EnvironmentBinding, + EnvironmentConfiguration, +} from "./BindingsManager"; +import { ILoggerService } from "dweb-api-types/dist/logger"; +import { IRedisClient } from "dweb-api-types/dist/redis"; +import { ICacheService, INamedMemoryCache } from "dweb-api-types/dist/cache"; +import { IKuboApiService } from "dweb-api-types/dist/kubo-api"; +import { + INameService, + INameServiceFactory, +} from "dweb-api-types/dist/name-service"; +import { IArweaveResolver } from "dweb-api-types/dist/arweave"; +import { IEnsResolverService } from "dweb-api-types/dist/ens-resolver"; +import { EnsResolverService } from "dweb-api-resolver/dist/resolver/index"; +import { + HostnameSubstitutionService, + IHostnameSubstitutionService, +} from "dweb-api-resolver/dist/HostnameSubstitutionService/index"; +import { ArweaveResolver } from "dweb-api-resolver/dist/resolver/arweave"; +import { + MemoryCacheFactory, + RedisClient, + TestRedisClient, + LocallyCachedRedisCacheService, +} from "dweb-api-cache/dist"; +import { TestLoggerService, LoggerService } from "dweb-api-logger/dist/index"; +import { NameServiceFactory } from "dweb-api-resolver/dist/nameservice/index"; +import {} from "dweb-api-resolver/dist/resolver/index"; +import { Web3NameSdkService } from "dweb-api-resolver/dist/nameservice/Web3NameSdkService"; +import { EnsService } from "dweb-api-resolver/dist/nameservice/EnsService"; + +export const createApplicationConfigurationBindingsManager = () => { + const configuration = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: () => + getDefaultServerConfiguration(), + [EnvironmentConfiguration.Development]: () => + new TestConfigurationService(), + }); + const logger = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new LoggerService(configuration.getBinding(env)), + [EnvironmentConfiguration.Development]: (env) => + new TestLoggerService(configuration.getBinding(env)), + }); + + const _namedMemoryCacheFactory = new MemoryCacheFactory(); + + const namedMemoryCacheFactory = new EnvironmentBinding< + (x: string) => INamedMemoryCache + >({ + [EnvironmentConfiguration.Production]: (env) => (serviceName: string) => + _namedMemoryCacheFactory.createNamedMemoryCacheFactory( + serviceName, + logger.getBinding(env), + configuration.getBinding(env), + ), + [EnvironmentConfiguration.Development]: (env) => (serviceName: string) => + _namedMemoryCacheFactory.createNamedMemoryCacheFactory( + serviceName, + logger.getBinding(env), + configuration.getBinding(env), + ), + }); + + const redisClient = new EnvironmentBinding({ + 
[EnvironmentConfiguration.Production]: (env) => + new RedisClient(configuration.getBinding(env)), + [EnvironmentConfiguration.Development]: (env) => + new TestRedisClient(configuration.getBinding(env)), + }); + + const cacheService = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new LocallyCachedRedisCacheService( + logger.getBinding(env), + redisClient.getBinding(env), + namedMemoryCacheFactory.getBinding(env), + configuration.getBinding(env), + ), + [EnvironmentConfiguration.Development]: (env) => + new LocallyCachedRedisCacheService( + logger.getBinding(env), + redisClient.getBinding(env), + namedMemoryCacheFactory.getBinding(env), + configuration.getBinding(env), + ), + }); + + const hostnameSubstitution = + new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new HostnameSubstitutionService( + configuration.getBinding(env), + logger.getBinding(env), + ), + [EnvironmentConfiguration.Development]: (env) => + new HostnameSubstitutionService( + configuration.getBinding(env), + logger.getBinding(env), + ), + }); + + const domainQuerySuperagent = + new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new DomainQuerySuperagentService(configuration.getBinding(env)), + [EnvironmentConfiguration.Development]: (_env) => + new TestDomainQuerySuperagentService(), + }); + + const domainQuery = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new DomainQueryService( + logger.getBinding(env), + domainQuerySuperagent.getBinding(env), + cacheService.getBinding(env), + configuration.getBinding(env), + ), + [EnvironmentConfiguration.Development]: (env) => + new DomainQueryService( + logger.getBinding(env), + domainQuerySuperagent.getBinding(env), + cacheService.getBinding(env), + configuration.getBinding(env), + ), + }); + + const kuboApi = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new KuboApiService(logger.getBinding(env), configuration.getBinding(env)), + [EnvironmentConfiguration.Development]: (env) => + new KuboApiService(logger.getBinding(env), configuration.getBinding(env)), + }); + + const web3NameSdk = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new Web3NameSdkService( + configuration.getBinding(env), + logger.getBinding(env), + ), + [EnvironmentConfiguration.Development]: (_env) => new TestResolverService(), + }); + + const ensService = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new EnsService(configuration.getBinding(env), logger.getBinding(env)), + [EnvironmentConfiguration.Development]: (_env) => new TestResolverService(), + }); + + const domainRateLimit = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new DomainRateLimitService( + redisClient.getBinding(env), + logger.getBinding(env), + ), + [EnvironmentConfiguration.Development]: (env) => + new DomainRateLimitService( + redisClient.getBinding(env), + logger.getBinding(env), + ), + }); + + const arweaveResolver = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new ArweaveResolver(logger.getBinding(env)), + [EnvironmentConfiguration.Development]: (_env) => new TestResolverService(), + }); + + const nameServiceFactory = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new NameServiceFactory( + logger.getBinding(env), + ensService.getBinding(env), + web3NameSdk.getBinding(env), + ), + [EnvironmentConfiguration.Development]: (env) => + new 
NameServiceFactory( + logger.getBinding(env), + ensService.getBinding(env), + web3NameSdk.getBinding(env), + ), + }); + + const ensResolver = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new EnsResolverService( + logger.getBinding(env), + cacheService.getBinding(env), + arweaveResolver.getBinding(env), + kuboApi.getBinding(env), + nameServiceFactory.getBinding(env), + ), + [EnvironmentConfiguration.Development]: (env) => + new EnsResolverService( + logger.getBinding(env), + cacheService.getBinding(env), + arweaveResolver.getBinding(env), + kuboApi.getBinding(env), + nameServiceFactory.getBinding(env), + ), + }); + + const dnsQuery = new EnvironmentBinding({ + [EnvironmentConfiguration.Production]: (env) => + new DnsQuery( + logger.getBinding(env), + configuration.getBinding(env), + domainQuery.getBinding(env), + ensResolver.getBinding(env), + ), + [EnvironmentConfiguration.Development]: (env) => + new DnsQuery( + logger.getBinding(env), + configuration.getBinding(env), + domainQuery.getBinding(env), + ensResolver.getBinding(env), + ), + }); + + return { + configuration, + logger, + namedMemoryCacheFactory, + redisClient, + cacheService, + hostnameSubstitution, + domainQuerySuperagent, + domainQuery, + kuboApi, + web3NameSdk, + ensService, + domainRateLimit, + arweaveResolver, + nameServiceFactory, + ensResolver, + dnsQuery, + }; +}; diff --git a/src/dnsquery/index.ts b/packages/dweb-api-server/src/dnsquery/index.ts similarity index 74% rename from src/dnsquery/index.ts rename to packages/dweb-api-server/src/dnsquery/index.ts index dfc6234..2a8c132 100644 --- a/src/dnsquery/index.ts +++ b/packages/dweb-api-server/src/dnsquery/index.ts @@ -1,15 +1,14 @@ import { errorBuilder, blockedForLegalReasons } from "../expressErrors"; import dnsPacket, { Question, RecordType } from "dns-packet"; -import { IEnsResolverService, Record } from "../services/EnsResolverService"; +import { IEnsResolverService, IRecord } from "dweb-api-types/dist/ens-resolver"; import { Request, Response } from "express"; -import { DITYPES } from "../dependencies/types"; import { IDomainQueryService } from "../services/DomainsQueryService"; import { getTraceIdFromRequest, hostnameIsENSTLD } from "../utils"; -import { recordNamespaceToUrlHandlerMap } from "../services/EnsResolverService/const"; -import { ILoggerService } from "../services/LoggerService"; -import { IConfigurationService } from "../configuration"; -import { inject, injectable } from "inversify"; -import { IRequestContext } from "../services/lib"; +import { ILoggerService } from "dweb-api-types/dist/logger"; +import { IRequestContext } from "dweb-api-types/dist/request-context"; +import { punycodeDomainPartsToUnicode } from "../utils/punycodeConverter"; +import { ICacheConfig, IConfigurationLogger } from "dweb-api-types/dist/config"; +import { recordNamespaceToUrlHandlerMap } from "dweb-api-resolver/dist/resolver/const"; type DoHPacket = { Status: string; @@ -31,16 +30,19 @@ export interface IDnsQuery { dnsqueryGet: (req: Request, res: Response) => Promise; } -@injectable() +type conf = IConfigurationLogger & ICacheConfig; + export class DnsQuery implements IDnsQuery { _logger: ILoggerService; - _configurationService: IConfigurationService; + _configurationService: conf; _domainQueryService: IDomainQueryService; _ensResolverService: IEnsResolverService; - constructor (@inject(DITYPES.LoggerService) logger: ILoggerService, - @inject(DITYPES.ConfigurationService) configuration: IConfigurationService, - 
@inject(DITYPES.DomainQueryService) domainQueryService: IDomainQueryService, - @inject(DITYPES.EnsResolverService) ensResolverService: IEnsResolverService){ + constructor( + logger: ILoggerService, + configuration: conf, + domainQueryService: IDomainQueryService, + ensResolverService: IEnsResolverService, + ) { this._logger = logger; this._configurationService = configuration; this._domainQueryService = domainQueryService; @@ -48,9 +50,9 @@ export class DnsQuery implements IDnsQuery { } logHeaderError = (request: IRequestContext, header: string, req: Request) => { const val = req.header(header); - this._logger.error('unexpected header', { + this._logger.error("unexpected header", { ...request, - origin: 'dnsquery', + origin: "dnsquery", context: { header, value: val, @@ -65,22 +67,19 @@ export class DnsQuery implements IDnsQuery { question.type.toLowerCase() !== "txt" || !hostnameIsENSTLD(question.name) ) { - this._logger.info( - 'ignoring question', - { - ...request, - origin: 'dnsquery', - context: { - question: question.name, - type: question.type, - }, - } - ); + this._logger.info("ignoring question", { + ...request, + origin: "dnsquery", + context: { + question: question.name, + type: question.type, + }, + }); return null; } - this._logger.info('processing request for ${question.name}', { + this._logger.info("processing request for ${question.name}", { ...request, - origin: 'dnsquery', + origin: "dnsquery", context: { question: question.name, type: question.type, @@ -89,9 +88,9 @@ export class DnsQuery implements IDnsQuery { var dohDomain: string; if (question.name.startsWith("_dnslink.")) { dohDomain = question.name.split("_dnslink.")[1]; - this._logger.info('handled dnslink prefix', { + this._logger.info("handled dnslink prefix", { ...request, - origin: 'dnsquery', + origin: "dnsquery", context: { question: question.name, type: question.type, @@ -101,22 +100,32 @@ export class DnsQuery implements IDnsQuery { } else { dohDomain = question.name; } - - const result = await this._ensResolverService.resolveEns(request, dohDomain); - + + const result = await this._ensResolverService.resolveEns( + request, + punycodeDomainPartsToUnicode(dohDomain), + ); + var link = recordToDnslink(result.record); if (!link) { return null; } - const configuration = this._configurationService.get(); + const cacheConfig = this._configurationService.getCacheConfig(); + const retData = []; + for (var i = 0; i < link.length; i += 255) { + retData.push(link.substring(i, i + 255)); + } return { name: question.name, - ttl: configuration.cache.ttl, - data: link, + ttl: cacheConfig.getTtl(), + data: retData, type: "TXT", }; }; - handleDnsQuery = async (request: IRequestContext, dnsRequest: dnsPacket.Packet) => { + handleDnsQuery = async ( + request: IRequestContext, + dnsRequest: dnsPacket.Packet, + ) => { var responses = []; if (!dnsRequest.questions) { dnsRequest.questions = []; @@ -135,35 +144,29 @@ export class DnsQuery implements IDnsQuery { for (var question of dnsRequest.questions) { const ret = await this.questionToEnsAnswer(request, question); if (ret) { - this._logger.info( - 'response to question', - { - ...request, - origin: 'dnsquery', - context: { - question: question.name, - type: question.type, - answer: ret.data, - }, - } - ); + this._logger.info("response to question", { + ...request, + origin: "dnsquery", + context: { + question: question.name, + type: question.type, + answer: ret.data, + }, + }); responses.push(ret); } else { - this._logger.error( - 'no respionse', - { - ...request, - origin: 
'dnsquery', - context: { - question: question.name, - type: question.type, - }, - } - ); + this._logger.error("no respionse", { + ...request, + origin: "dnsquery", + context: { + question: question.name, + type: question.type, + }, + }); srvfail = true; } } - + try { const responsePacket = dnsPacket.encode({ id: dnsRequest.id, @@ -176,9 +179,9 @@ export class DnsQuery implements IDnsQuery { payload: true, data: responsePacket, code: 200, - srvfail + srvfail, }; - } catch (e) { + } catch (e: any) { this._logger.error("When building dns response packet: ", e); return { error: true, @@ -191,41 +194,46 @@ export class DnsQuery implements IDnsQuery { const trace_id = getTraceIdFromRequest(req); const request = { trace_id, - } + }; if (req.header("accept") !== "application/dns-message") { this.logHeaderError(request, "accept", req); errorBuilder(res, 415); return; } - + if (req.headers["content-type"] !== "application/dns-message") { this.logHeaderError(request, "content-type", req); errorBuilder(res, 415); return; } - + const requestBody: string = req.body; let dnsRequest; try { dnsRequest = dnsPacket.decode(Buffer.from(requestBody)); - } catch (e) { + } catch (e: any) { this._logger.error("dnsqueryPost: could not decode DNS packet", e); errorBuilder(res, 500); return; } - + if (dnsRequest.questions) { for (const question of dnsRequest.questions) { - if (await this._domainQueryService.checkBlacklist(request, question.name)) { + if ( + await this._domainQueryService.checkBlacklist( + request, + punycodeDomainPartsToUnicode(question.name), + ) + ) { blockedForLegalReasons(res); return; } } } - + const responsePacket = await this.handleDnsQuery(request, dnsRequest); if (responsePacket.error) { - errorBuilder(res, responsePacket.code); + errorBuilder(res, responsePacket.code, responsePacket.message); return; } else if (responsePacket.payload) { const data = new Uint8Array(responsePacket.data); @@ -238,10 +246,9 @@ export class DnsQuery implements IDnsQuery { }; dnsqueryGet = async (req: Request, res: Response) => { const trace_id = getTraceIdFromRequest(req); - const request:IRequestContext = { + const request: IRequestContext = { trace_id, - } - const configuration = this._configurationService.get(); + }; let dnsRequest: dnsPacket.Packet | null = null; //TODO: is name already punycoded? how does it behave with utf8? 
//TODO: from the docs, RFC 4343 backslash escapes are accepted @@ -249,7 +256,7 @@ export class DnsQuery implements IDnsQuery { const { query } = req; const name = query.name; const type = query.type || "TXT"; - + if (typeof name !== "string" || typeof type !== "string") { errorBuilder(res, 400); return; @@ -280,27 +287,32 @@ export class DnsQuery implements IDnsQuery { } if (dnsRequest && dnsRequest.questions) { for (const question of dnsRequest.questions) { - if (await this._domainQueryService.checkBlacklist(request, question.name)) { + if ( + await this._domainQueryService.checkBlacklist( + request, + punycodeDomainPartsToUnicode(question.name), + ) + ) { blockedForLegalReasons(res); return; } } } - + var result; try { result = dnsRequest && (await this.handleDnsQuery(request, dnsRequest)); - } catch(e) { + } catch (e: any) { this._logger.error("dnsqueryGet: error handling dns query", e); res.writeHead(200); - const errorPacket:DoHPacket = { + const errorPacket: DoHPacket = { Question: [], Answer: [], Status: "2", TC: false, - } + }; res.write(JSON.stringify(errorPacket)); - res.end() + res.end(); return; } if (!result) { @@ -325,10 +337,9 @@ export class DnsQuery implements IDnsQuery { if (q.type === "TXT") { decoded_type = 16; } else { - this._logger.error('unhandled question type', - { + this._logger.error("unhandled question type", { ...request, - origin: 'dnsquery', + origin: "dnsquery", context: { question: q.name, type: q.type, @@ -343,6 +354,8 @@ export class DnsQuery implements IDnsQuery { ret.Question.push(tmp); } } + + const cacheConfig = this._configurationService.getCacheConfig(); if (decoded.answers) { for (var a of decoded.answers) { if (a.type !== "TXT") { @@ -362,8 +375,8 @@ export class DnsQuery implements IDnsQuery { //we only know how to handle txt tmp.type = 16; tmp.name = a.name; - tmp.data = a.data.toString(); - tmp.ttl = Number(configuration.cache.ttl); + tmp.data = (a.data as string[]).join(""); + tmp.ttl = Number(cacheConfig.getTtl()); ret.Answer.push(tmp); } } @@ -371,14 +384,14 @@ export class DnsQuery implements IDnsQuery { "Content-Type": "application/x-javascript", }); res.write(JSON.stringify(ret)); - + res.end(); return; } else { - errorBuilder(res, result.code || 500); + errorBuilder(res, result.code || 500, result.message); return; } - }; + }; } const trimTrailingSlashFromPath = (p: string) => { @@ -392,7 +405,7 @@ const trimTrailingSlashFromPath = (p: string) => { } }; -const recordToDnslink = (result: Record): string | null => { +const recordToDnslink = (result: IRecord): string | null => { if (!result) { return null; } else if (result._tag === "ens-socials-redirect") { @@ -414,4 +427,4 @@ const recordToDnslink = (result: Record): string | null => { //totality checking, if result.codec is not never that means Record changed return result.codec; -}; \ No newline at end of file +}; diff --git a/src/expressErrors/index.ts b/packages/dweb-api-server/src/expressErrors/index.ts similarity index 84% rename from src/expressErrors/index.ts rename to packages/dweb-api-server/src/expressErrors/index.ts index 65ed298..2e814f8 100644 --- a/src/expressErrors/index.ts +++ b/packages/dweb-api-server/src/expressErrors/index.ts @@ -24,11 +24,14 @@ export function blockedForLegalReasons(res: Response) { res.end(); } -export function errorBuilder(res: Response, code = 500) { +export function errorBuilder(res: Response, code = 500, message?: string) { res.writeHead(code, { "Content-Type": "text/plain", }); - res.write(`Error ${code}`); + res.write(`Error ${code}\n`); + 
if (message) { + res.write(message); + } res.end(); } diff --git a/packages/dweb-api-server/src/index.ts b/packages/dweb-api-server/src/index.ts new file mode 100644 index 0000000..37d43ca --- /dev/null +++ b/packages/dweb-api-server/src/index.ts @@ -0,0 +1,20 @@ +import { EnvironmentConfiguration } from "./dependencies/BindingsManager.js"; +import { createApplicationConfigurationBindingsManager } from "./dependencies/services.js"; +import { Server } from "./server/index.js"; + +// Start main worker process + +const services = createApplicationConfigurationBindingsManager(); +const env = EnvironmentConfiguration.Production; +const server = new Server( + services.configuration.getBinding(env), + services.logger.getBinding(env), + services.domainQuery.getBinding(env), + services.ensResolver.getBinding(env), + services.arweaveResolver.getBinding(env), + services.dnsQuery.getBinding(env), + services.domainRateLimit.getBinding(env), + services.hostnameSubstitution.getBinding(env), +); + +server.start(); diff --git a/packages/dweb-api-server/src/scripts/dump_test_cases.ts b/packages/dweb-api-server/src/scripts/dump_test_cases.ts new file mode 100644 index 0000000..e48481b --- /dev/null +++ b/packages/dweb-api-server/src/scripts/dump_test_cases.ts @@ -0,0 +1,123 @@ +import "reflect-metadata"; +import { + TestConfigurationService, + getDefaultServerConfiguration, +} from "../configuration"; +import { + ArweaveResolver, + arweaveTxIdToArweaveSandboxSubdomainId, +} from "dweb-api-resolver/dist/resolver/arweave"; +import { LoggerService } from "dweb-api-logger/dist/index"; +import { EnsService } from "dweb-api-resolver/dist/nameservice/EnsService"; + +const configurationService = new TestConfigurationService(); +//this is a hack to ensure certain values (i.e. 
no logging) on the configuration service +//logging is disabled for easy piping +configurationService.set((conf) => { + const cfg = (getDefaultServerConfiguration() as any) + ._innerConfigurationObject; + conf.ens = cfg.ens; + conf.arweave = cfg.arweave; + conf.ipfs = cfg.ipfs; + conf.swarm = cfg.swarm; + conf.logging = cfg.logging; + conf.logging.level = "none"; + conf.cache = cfg.cache; + conf.cache.ttl = 5; + conf.router = cfg.router; + conf.ethereum = cfg.ethereum; +}); + +const loggerService = new LoggerService(configurationService); +const ensService = new EnsService(configurationService, loggerService); +const arweaveService = new ArweaveResolver(loggerService); + +const testCases = [ + { + name: "blockranger.eth", + type: "ipfs", + }, + { + name: "fast-ipfs.eth", + type: "ipfs", + }, + { + name: "surveychain.eth", + type: "ipfs", + }, + { + name: "easy-rln.eth", + type: "ipfs", + }, + { + name: "view-code.eth", + type: "ipfs", + }, + { + name: "makesy.eth", + type: "arweave", + }, + { + name: "swarm.eth", + type: "swarm", + }, + { + name: "nick.eth", + type: "ipns", + }, + { + name: "not-a-real-ens-name-ahsalabadkadvhda.eth", //don't register this lmao + type: "none", + }, +]; + +const main = async () => { + const results = []; + for (const testCase of testCases) { + const request = { + trace_id: "TEST_TRACE_ID", + }; + const contentHash = await ensService.getContentHash(request, testCase.name); + const additionalInfo: Partial<{ + arweave: { + result: string; + query: string; + subdomain_sandbox_id: string; + }; + }> = {}; + if (testCase.type === "arweave") { + if (!contentHash) { + throw "arweave result is null"; + } + const ar_id = contentHash.split("arweave://")[1]; + const arweaveResult = await arweaveService.resolveArweave( + request, + ar_id, + testCase.name, + ); + const subdomain_sandbox_id = await arweaveTxIdToArweaveSandboxSubdomainId( + request, + loggerService, + ar_id, + ); + if (!subdomain_sandbox_id) { + throw "subdomain_sandbox_id is null"; + } + additionalInfo.arweave = { + result: arweaveResult, + query: ar_id, + subdomain_sandbox_id, + }; + } + + results.push({ + name: testCase.name, + type: testCase.type, + contentHash: contentHash, + additionalInfo, + }); + } + console.log(JSON.stringify(results, null, 2)); +}; + +main(); diff --git a/src/server/index.ts b/packages/dweb-api-server/src/server/index.ts similarity index 60% rename from src/server/index.ts rename to packages/dweb-api-server/src/server/index.ts index dfc5942..89b67f2 100644 --- a/src/server/index.ts +++ b/packages/dweb-api-server/src/server/index.ts @@ -2,26 +2,26 @@ import express, { Request, Response } from "express"; import { IEnsResolverService, IEnsResolverServiceResolveEnsRet, - Record, -} from "../services/EnsResolverService"; + IRecord, +} from "dweb-api-types/dist/ens-resolver"; import bodyParser from "body-parser"; -import { notSupported, blockedForLegalReasons, noContentHashSet } from "../expressErrors"; +import { + notSupported, + blockedForLegalReasons, + noContentHashSet, +} from "../expressErrors"; import cors from "cors"; import { IDomainQueryService } from "../services/DomainsQueryService"; -import { DITYPES } from "../dependencies/types"; import { punycodeDomainPartsToUnicode } from "../utils/punycodeConverter"; -import { recordToProxyRecord } from "../services/EnsResolverService/utils"; -import { inject, injectable } from "inversify"; -import { IConfigurationService } from "../configuration"; -import { ILoggerService } from "../services/LoggerService"; +import { ILoggerService } 
from "dweb-api-types/dist/logger"; import { IDnsQuery } from "../dnsquery"; -import { IArweaveResolver } from "../services/EnsResolverService/arweave"; +import { IArweaveResolver } from "dweb-api-types/dist/arweave"; import { getDomainOfRequestFromGet, getTraceIdFromRequest } from "../utils"; import { IDomainRateLimitService } from "../services/DomainRateLimit"; -import { IRequestContext } from "../services/lib"; -import { ParamsDictionary } from "express-serve-static-core"; -import { ParsedQs } from "qs"; -import { IHostnameSubstitutionService } from "../services/HostnameSubstitutionService"; +import { IRequestContext } from "dweb-api-types/dist/request-context"; +import { recordToProxyRecord } from "dweb-api-resolver/dist/resolver/utils"; +import { ServerConfiguration } from "../configuration"; +import { IHostnameSubstitutionService } from "dweb-api-resolver/dist/HostnameSubstitutionService/index"; interface ProxyServerErrorNotSupported { _type: "ProxyServerNotSupported"; @@ -38,7 +38,11 @@ interface ProxyServerErrorTag { } type ProxyServerError = ProxyServerErrorTag & - (ProxyServerErrorBlacklisted | ProxyServerErrorNotSupported | ProxyServerInternalServerError); + ( + | ProxyServerErrorBlacklisted + | ProxyServerErrorNotSupported + | ProxyServerInternalServerError + ); type ProxyServerLogicRet = | ProxyServerError @@ -53,9 +57,8 @@ const proxyExpress = express(); dnsqueryExpress.use(cors()); -@injectable() export class Server { - _configurationService: IConfigurationService; + _configurationService: ServerConfiguration; _logger: ILoggerService; _domainQueryService: IDomainQueryService; _DnsQuery: IDnsQuery; @@ -65,14 +68,14 @@ export class Server { _hostnameSubstitutionService: IHostnameSubstitutionService; constructor( - @inject(DITYPES.ConfigurationService) configurationService: IConfigurationService, - @inject(DITYPES.LoggerService) logger: ILoggerService, - @inject(DITYPES.DomainQueryService) domainQueryService: IDomainQueryService, - @inject(DITYPES.EnsResolverService) ensResolverService: IEnsResolverService, - @inject(DITYPES.ArweaveResolver) arweaveResolver: IArweaveResolver, - @inject(DITYPES.DnsQuery) dnsQuery: IDnsQuery, - @inject(DITYPES.DomainRateLimitService) domainRateLimitService: IDomainRateLimitService, - @inject(DITYPES.HostnameSubstitutionService) hostnameSubstitutionService: IHostnameSubstitutionService, + configurationService: ServerConfiguration, + logger: ILoggerService, + domainQueryService: IDomainQueryService, + ensResolverService: IEnsResolverService, + arweaveResolver: IArweaveResolver, + dnsQuery: IDnsQuery, + domainRateLimitService: IDomainRateLimitService, + hostnameSubstitutionService: IHostnameSubstitutionService, ) { this._configurationService = configurationService; this._logger = logger; @@ -84,15 +87,19 @@ export class Server { this._hostnameSubstitutionService = hostnameSubstitutionService; } - requestHandler = async (request: IRequestContext, content: Record, req: Request, res: Response) => { - const configuration = this._configurationService.get(); + requestHandler = async ( + request: IRequestContext, + content: IRecord, + req: Request, + res: Response, + ) => { if (!content) { - this._logger.debug('no content', { + this._logger.debug("no content", { ...request, origin: "requestHandler", context: { host: req.get("host"), - } + }, }); notSupported(res); return; @@ -104,57 +111,72 @@ export class Server { content.codec === "arweave-ns" || content.codec === "swarm" ) { - const proxyContent = await recordToProxyRecord(request, 
this._configurationService, this._logger, content); - this._logger.debug('content supported', { + const proxyContent = await recordToProxyRecord( + request, + this._configurationService, + this._logger, + content, + ); + this._logger.debug("content supported", { ...request, origin: "requestHandler", context: { host: req.get("host"), content: proxyContent, - } + }, }); - if(proxyContent._tag === "ens-socials-redirect" && !configuration.ens.socialsEndpointEnabled) { - this._logger.debug('no content hash set', { + if ( + (proxyContent._tag === "ens-socials-redirect" && + !this._configurationService.getConfigEnsSocialsEndpoint() + .getEnsSocialsEndpoint) || + proxyContent._tag === "ProxyRecordUnableToRedirect" + ) { + this._logger.debug("no content hash set", { ...request, origin: "requestHandler", context: { host: req.get("host"), - } + }, }); noContentHashSet(res); return; } - const xContentLocation = proxyContent.XContentLocation.replace(/\/+$/, ""); // remove trailing slashes + const xContentLocation = proxyContent.XContentLocation.replace( + /\/+$/, + "", + ); // remove trailing slashes const xContentLocationWithoutProtocol = xContentLocation.replace( /^[^:]+:\/\//, "", ); // remove protocol - this._logger.debug('proxying content', { + this._logger.debug("proxying content", { ...request, origin: "requestHandler", context: { host: req.get("host"), location: xContentLocationWithoutProtocol, path: proxyContent.XContentPath, - } + }, }); res.writeHead(200, { "X-Content-Location": xContentLocationWithoutProtocol, "X-Content-Path": proxyContent.XContentPath, - "X-Content-Storage-Type": content._tag === "ens-socials-redirect" ? undefined : content.codec, + "X-Content-Storage-Type": + content._tag === "ens-socials-redirect" + ? undefined + : (proxyContent.overrideCodecHeader ?? 
content.codec), }); res.end(); return; } - + let _exhaustiveCheck: never = content.codec; return _exhaustiveCheck; - } + }; parseHostnameFromRequest = (req: Request, res: Response) => { - const configuration = this._configurationService.get(); const host = req.headers["host"]; if (!host) { throw "Unexpected host header not set"; @@ -173,14 +195,17 @@ export class Server { ): Promise => { var hostname; try { - hostname = await this._domainQueryService.checkLinkedDomains(request, unprocessedHostname); + hostname = await this._domainQueryService.checkLinkedDomains( + request, + unprocessedHostname, + ); } catch (e) { - this._logger.error('caught error when checking linked domains', { + this._logger.error("caught error when checking linked domains", { ...request, origin: "proxyServerLogic", context: { error: e, - } + }, }); return { _tag: "ProxyServerError", @@ -195,14 +220,17 @@ export class Server { } var blacklisted = false; try { - blacklisted = await this._domainQueryService.checkBlacklist(request, hostname) - } catch(e) { - this._logger.error('caught error when checking blacklist', { + blacklisted = await this._domainQueryService.checkBlacklist( + request, + hostname, + ); + } catch (e) { + this._logger.error("caught error when checking blacklist", { ...request, origin: "proxyServerLogic", context: { error: e, - } + }, }); return { _tag: "ProxyServerError", @@ -216,14 +244,14 @@ export class Server { }; } let location = await this._ensResolverService.resolveEns(request, hostname); - this._logger.debug('resolved ens', { + this._logger.debug("resolved ens", { ...request, origin: "proxyServerLogic", context: { hostname: hostname, location: location, - } - }) + }, + }); if (!location) { return { _tag: "ProxyServerError", @@ -237,26 +265,23 @@ export class Server { } }; - proxyServer = async ( - req: Request, - res: Response, - ): Promise => { + proxyServer = async (req: Request, res: Response): Promise => { var hostname: string | null = this.parseHostnameFromRequest(req, res); var isError: ProxyServerLogicRet; const trace_id = getTraceIdFromRequest(req); - const request:IRequestContext = { + const request: IRequestContext = { trace_id, - } + }; try { isError = await this.proxyServerLogic(request, hostname); - } catch(e) { - this._logger.error('unrecoverable error', { + } catch (e) { + this._logger.error("unrecoverable error", { ...request, origin: "proxyServer", context: { error: e, hostname: hostname, - } + }, }); res.status(500); res.end(); @@ -264,12 +289,12 @@ export class Server { } if (isError._tag === "ProxyServerError") { if (isError._type === "ProxyServerErrorBlacklisted") { - this._logger.debug('hostname is blacklisted', { + this._logger.debug("hostname is blacklisted", { ...request, origin: "proxyServer", context: { hostname: hostname, - } + }, }); blockedForLegalReasons(res); return null; @@ -279,7 +304,7 @@ export class Server { origin: "proxyServer", context: { hostname: hostname, - } + }, }); notSupported(res); return null; @@ -289,25 +314,25 @@ export class Server { origin: "proxyServer", context: { hostname: hostname, - } + }, }); res.status(500); res.end(); return null; } } else { - this._logger.debug('content supported', { + this._logger.debug("content supported", { ...request, origin: "proxyServer", context: { hostname: hostname, content: isError.ret, - } + }, }); this.requestHandler(request, isError.ret.record, req, res); return null; } - + let _exhaustiveCheck: never = isError; return _exhaustiveCheck; }; @@ -318,26 +343,60 @@ export class Server { 200 -> success, 
otherwise no cert */ public caddy = async (req: Request, res: Response): Promise => { - const configuration = this._configurationService.get(); - const hostname = getDomainOfRequestFromGet(this._hostnameSubstitutionService, req, "domain"); + const askEndpointConfig = this._configurationService.getConfigAskEndpoint(); + const socialsEndpointConfig = + this._configurationService.getConfigEnsSocialsEndpoint(); + + const hostname_obj = getDomainOfRequestFromGet( + this._hostnameSubstitutionService, + req, + "domain", + ); const trace_id = getTraceIdFromRequest(req); - const request:IRequestContext = { + const request: IRequestContext = { trace_id, + }; + if ( + !hostname_obj || + hostname_obj.domain_without_suffix_substitutions.length > 256 + ) { + notSupported(res); + return null; } - if (typeof hostname !== "string" || hostname.length > 512) { + + const hostname = hostname_obj.domain; + + const domain_label_count = + hostname_obj.domain_without_suffix_substitutions.split(".").length; + if (domain_label_count > askEndpointConfig.getMaxLabelLimit()) { + this._logger.info("domain label count exceeded", { + ...request, + origin: "caddy", + context: { + hostname, + originally_requested_hostname: + hostname_obj.domain_without_suffix_substitutions, + domain_label_count, + }, + }); notSupported(res); return null; } - if(configuration.ask.rate.enabled && !req.headers['x-health-check']) { - const rateLimited = await this._domainRateLimitService.incrementRateLimit(request, hostname, configuration.ask.rate.limit, configuration.ask.rate.period); - if(rateLimited.countOverMax) { - this._logger.error('rate limited', { + if (askEndpointConfig.getRateEnabled() && !req.headers["x-health-check"]) { + const rateLimited = await this._domainRateLimitService.incrementRateLimit( + request, + hostname, + askEndpointConfig.getRateLimit(), + askEndpointConfig.getRatePeriod(), + ); + if (rateLimited.countOverMax) { + this._logger.error("rate limited", { ...request, origin: "caddy", context: { hostname: hostname, - } + }, }); res.status(429); res.end(); @@ -345,17 +404,17 @@ export class Server { } } - var isError:ProxyServerLogicRet; + var isError: ProxyServerLogicRet; try { isError = await this.proxyServerLogic(request, hostname); - } catch(e) { - this._logger.error('unrecoverable error', { + } catch (e) { + this._logger.error("unrecoverable error", { ...request, origin: "caddy", context: { hostname: hostname, error: e, - } + }, }); res.status(500); res.end(); @@ -375,7 +434,11 @@ export class Server { return null; } } else { - if((isError.ret.record?._tag === "ens-socials-redirect" || !isError.ret.resolverExists) && !configuration.ens.socialsEndpointEnabled) { + if ( + (isError.ret.record?._tag === "ens-socials-redirect" || + !isError.ret.resolverExists) && + !socialsEndpointConfig.getEnsSocialsEndpoint + ) { res.status(404); } else { res.status(200); @@ -386,52 +449,73 @@ export class Server { return isError; //this should be of type never, otherwise there's an unexhausted codepath }; start = () => { - const configuration = this._configurationService.get(); + const routerConfig = this._configurationService.getRouterConfig(); + const dnsqueryRouterConfig = + this._configurationService.getDnsqueryRouterConfig(); + const askRouterConfig = this._configurationService.getAskRouterConfig(); proxyExpress.all("*", this.proxyServer.bind(this)); - proxyExpress.listen(configuration.router.listen); + proxyExpress.listen(routerConfig.getRouterListenPort()); dnsqueryExpress.post( "/dns-query", [bodyParser.raw({ type: 
"application/dns-message", limit: "2kb" })], async (req: Request, res: Response) => { - await this._DnsQuery.dnsqueryPost(req, res); - } + await this._DnsQuery.dnsqueryPost(req, res).catch((e) => { + this._logger.error("caught error in dnsqueryPost", { + trace_id: getTraceIdFromRequest(req), + origin: "dnsqueryPost", + context: { + error: e, + }, + }); + res.status(500); + res.end(); + }); + }, ); dnsqueryExpress.get( "/dns-query", [bodyParser.json({ limit: "2kb" })], async (req: Request, res: Response) => { - await this._DnsQuery.dnsqueryGet(req, res); - } + await this._DnsQuery.dnsqueryGet(req, res).catch((e) => { + this._logger.error("caught error in dnsqueryGet", { + trace_id: getTraceIdFromRequest(req), + origin: "dnsqueryGet", + context: { + error: e, + }, + }); + res.status(500); + res.end(); + }); + }, ); - if (configuration.dnsquery.enabled) { - dnsqueryExpress.listen(configuration.dnsquery.listen, () => { - this._logger.info( - 'DNS query server started', - { + if (dnsqueryRouterConfig.getDnsqueryRouterEnabled()) { + dnsqueryExpress.listen( + dnsqueryRouterConfig.getDnsqueryRouterListenPort(), + () => { + this._logger.info("DNS query server started", { trace_id: "UNDEFINED_TRACE_ID", origin: "start", context: { - listen: configuration.dnsquery.listen, - } - } - ); - }); + listen: dnsqueryRouterConfig.getDnsqueryRouterListenPort(), + }, + }); + }, + ); } - switch (configuration.ask.enabled) { - case "true": + switch (askRouterConfig.getAskRouterEnabled()) { + case true: askExpress.get("/ask", this.caddy.bind(this)); - askExpress.listen(configuration.ask.listen, () => { - this._logger.info( - 'Ask server started', - { - trace_id: "UNDEFINED_TRACE_ID", - origin: "start", - context: { - listen: configuration.ask.listen, - } - }); + askExpress.listen(askRouterConfig.getAskRouterListenPort(), () => { + this._logger.info("Ask server started", { + trace_id: "UNDEFINED_TRACE_ID", + origin: "start", + context: { + listen: askRouterConfig.getAskRouterListenPort(), + }, + }); }); break; } }; -} \ No newline at end of file +} diff --git a/packages/dweb-api-server/src/services/DomainRateLimit/DomainRateLimit.spec.ts b/packages/dweb-api-server/src/services/DomainRateLimit/DomainRateLimit.spec.ts new file mode 100644 index 0000000..943f24d --- /dev/null +++ b/packages/dweb-api-server/src/services/DomainRateLimit/DomainRateLimit.spec.ts @@ -0,0 +1,100 @@ +import { expect } from "chai"; +import { describe, it } from "mocha"; +import { SinonStubbedInstance, createStubInstance } from "sinon"; +import { IRequestContext } from "dweb-api-types/dist/request-context.js"; +import { IRedisClient } from "dweb-api-types/dist/redis.js"; +import { ILoggerService } from "dweb-api-types/dist/logger.js"; +import { RedisClient } from "dweb-api-cache/dist/index.js"; +import { DomainRateLimitService } from "."; +import { LoggerService } from "dweb-api-logger/dist/index.js"; + +describe("DomainRateLimitService", () => { + let redisClient: SinonStubbedInstance; + let logger: ILoggerService; + let service: DomainRateLimitService; + + beforeEach(() => { + redisClient = createStubInstance(RedisClient); + logger = createStubInstance(LoggerService); + service = new DomainRateLimitService(redisClient as any, logger as any); + }); + + it("should increment rate limit and set TTL if not already set", async () => { + const domain = "test.com"; + const maxQueries = 10; + const intervalInSeconds = 60; + + redisClient.incr.resolves(1); + redisClient.ttl.resolves(-1); + + const request: IRequestContext = { + trace_id: 
"TEST_TRACE_ID", + }; + + const result = await service.incrementRateLimit( + request, + domain, + maxQueries, + intervalInSeconds, + ); + + expect(result.countOverMax).to.be.false; + expect(result.count).to.equal(1); + expect(result.ttl).to.equal(intervalInSeconds); + expect( + redisClient.expire.calledOnceWith( + `rate_limit/${domain}`, + intervalInSeconds, + ), + ).to.be.true; + }); + + it("should increment rate limit and not modify TTL if already set", async () => { + const domain = "test.com"; + const maxQueries = 10; + const intervalInSeconds = 60; + + const request: IRequestContext = { + trace_id: "TEST_TRACE_ID", + }; + + redisClient.incr.resolves(2); + redisClient.ttl.resolves(30); + + const result = await service.incrementRateLimit( + request, + domain, + maxQueries, + intervalInSeconds, + ); + expect(result.countOverMax).to.be.false; + expect(result.count).to.equal(2); + expect(result.ttl).to.equal(30); + expect(redisClient.expire.notCalled).to.be.true; + }); + + it("should indicate when count is over max queries", async () => { + const domain = "test.com"; + const maxQueries = 10; + const intervalInSeconds = 60; + + const request: IRequestContext = { + trace_id: "TEST_TRACE_ID", + }; + + redisClient.incr.resolves(11); + redisClient.ttl.resolves(30); + + const result = await service.incrementRateLimit( + request, + domain, + maxQueries, + intervalInSeconds, + ); + + expect(result.countOverMax).to.be.true; + expect(result.count).to.equal(11); + expect(result.ttl).to.equal(30); + expect(redisClient.expire.notCalled).to.be.true; + }); +}); diff --git a/packages/dweb-api-server/src/services/DomainRateLimit/index.ts b/packages/dweb-api-server/src/services/DomainRateLimit/index.ts new file mode 100644 index 0000000..70b32b0 --- /dev/null +++ b/packages/dweb-api-server/src/services/DomainRateLimit/index.ts @@ -0,0 +1,66 @@ +import { IRedisClient } from "dweb-api-types/dist/redis"; +import { ILoggerService } from "dweb-api-types/dist/logger"; +import { IRequestContext } from "dweb-api-types/dist/request-context"; + +export interface IDomainRateLimitServiceRet { + countOverMax: boolean; + count: number; + ttl: number; +} + +export interface IDomainRateLimitService { + incrementRateLimit( + request: IRequestContext, + domain: string, + maxQueries: number, + intervalInSeconds: number, + ): Promise; +} + +export class DomainRateLimitService implements IDomainRateLimitService { + _redisClient: IRedisClient; + _logger: ILoggerService; + + constructor(redisClient: IRedisClient, logger: ILoggerService) { + this._redisClient = redisClient; + this._logger = logger; + } + + async incrementRateLimit( + request: IRequestContext, + domain: string, + maxQueries: number, + intervalInSeconds: number, + ): Promise { + const key = `rate_limit/${domain}`; + const count = await this._redisClient.incr(key); + var ttl = await this._redisClient.ttl(key); + this._logger.debug("Rate limit incremented", { + ...request, + origin: "DomainRateLimitService", + context: { + key: key, + count: count, + ttl: ttl, + }, + }); + if (ttl < 1) { + await this._redisClient.expire(key, intervalInSeconds); + ttl = intervalInSeconds; + this._logger.debug(`Rate limit expired, setting new TTL`, { + ...request, + origin: "DomainRateLimitService", + context: { + key: key, + count: count, + ttl: ttl, + }, + }); + } + return { + countOverMax: count > maxQueries, + count: count, + ttl: ttl, + }; + } +} diff --git a/src/services/DomainsQueryService/index.ts b/packages/dweb-api-server/src/services/DomainsQueryService/index.ts similarity 
index 56% rename from src/services/DomainsQueryService/index.ts rename to packages/dweb-api-server/src/services/DomainsQueryService/index.ts index 303491b..76e65c4 100644 --- a/src/services/DomainsQueryService/index.ts +++ b/packages/dweb-api-server/src/services/DomainsQueryService/index.ts @@ -1,21 +1,21 @@ import superagent, { Response } from "superagent"; -import { ILoggerService } from "../LoggerService"; -import { inject, injectable } from "inversify"; -import { DITYPES } from "../../dependencies/types"; +import { ILoggerService } from "dweb-api-types/dist/logger"; import { hostnameIsENSTLD } from "../../utils"; -import { ICacheService } from "../CacheService"; +import { ICacheService } from "dweb-api-types/dist/cache"; import * as z from "zod"; -import { IConfigurationService } from "../../configuration"; -import { IRequestContext } from "../lib"; +import { IRequestContext } from "dweb-api-types/dist/request-context"; +import { IDomainQueryConfig } from "dweb-api-types/dist/config"; //passthrough because we want to preserve the full query response -const DomainQueryValueCodec = z.object({ - blacklisted: z.boolean().optional(), - domain: z.string(), - canonical_name: z.string().optional(), -}).passthrough(); +const DomainQueryValueCodec = z + .object({ + blacklisted: z.boolean().optional(), + domain: z.string(), + canonical_name: z.string().optional(), + }) + .passthrough(); -const createDefaultDomainQueryValue = (domain:string): DomainQueryValue => ({ +const createDefaultDomainQueryValue = (domain: string): DomainQueryValue => ({ domain, blacklisted: false, canonical_name: undefined, @@ -24,55 +24,78 @@ const createDefaultDomainQueryValue = (domain:string): DomainQueryValue => ({ export type DomainQueryValue = z.infer; export interface IDomainQueryService { - domainQuery: (request: IRequestContext, domain: string) => Promise; - checkBlacklist: (request: IRequestContext, domain: string) => Promise; - checkLinkedDomains: (request: IRequestContext, domain: string) => Promise; + domainQuery: ( + request: IRequestContext, + domain: string, + ) => Promise; + checkBlacklist: ( + request: IRequestContext, + domain: string, + ) => Promise; + checkLinkedDomains: ( + request: IRequestContext, + domain: string, + ) => Promise; } //shallow mock for testing export interface IDomainQuerySuperagentService { - query: (domain: string) => Promise; + query: (domain: string) => Promise; } -@injectable() export class DomainQuerySuperagentService implements IDomainQuerySuperagentService { - _configurationService: IConfigurationService; - constructor(@inject(DITYPES.ConfigurationService) configurationService: IConfigurationService) { + _configurationService: IDomainQueryConfig; + constructor(configurationService: IDomainQueryConfig) { this._configurationService = configurationService; } public async query(domain: string) { - const configuration = this._configurationService.get(); + const configuration = this._configurationService.getDomainQueryConfig(); + if (!configuration) { + return null; + } return await superagent - .get(`${configuration.domainsapi.endpoint}/query`) + .get(`${configuration.getDomainsApiEndpoint()}/query`) .query({ domain }) .ok((res) => res.status < 500); } } -@injectable() export class DomainQueryService implements IDomainQueryService { _logger: ILoggerService; _superAgentSvc: IDomainQuerySuperagentService; _cacheService: ICacheService; - _configurationService: IConfigurationService; + _configurationService: IDomainQueryConfig | null; constructor( - @inject(DITYPES.LoggerService) 
logger: ILoggerService, - @inject(DITYPES.DomainQuerySuperagentService) superAgentSvc: IDomainQuerySuperagentService, - @inject(DITYPES.CacheService) cacheService: ICacheService, - @inject(DITYPES.ConfigurationService) configurationService: IConfigurationService, + logger: ILoggerService, + superAgentSvc: IDomainQuerySuperagentService, + cacheService: ICacheService, + configurationService: IDomainQueryConfig | null, ) { this._logger = logger; this._superAgentSvc = superAgentSvc; this._cacheService = cacheService; this._configurationService = configurationService; } - domainQuery = async (request: IRequestContext, domain: string): Promise => this._cacheService.memoize(request, () => this.domainQueryInternal(request, domain), DomainQueryValueCodec, "domainQuery", domain) - domainQueryInternal = async (request: IRequestContext, domain: string): Promise => { - const configuration = this._configurationService.get(); - if (!configuration.domainsapi.endpoint) { + domainQuery = async ( + request: IRequestContext, + domain: string, + ): Promise => + this._cacheService.memoize( + request, + () => this.domainQueryInternal(request, domain), + DomainQueryValueCodec, + "domainQuery", + domain, + ); + domainQueryInternal = async ( + request: IRequestContext, + domain: string, + ): Promise => { + const configuration = this._configurationService?.getDomainQueryConfig(); + if (!configuration) { this._logger.error( "domainQuery: configuration.domainsapi.endpoint is not set, short circuiting checks", { @@ -81,7 +104,7 @@ export class DomainQueryService implements IDomainQueryService { context: { domain, }, - } + }, ); return createDefaultDomainQueryValue(domain); } @@ -93,31 +116,40 @@ export class DomainQueryService implements IDomainQueryService { return createDefaultDomainQueryValue(domain); } + const endpoint = configuration.getDomainsApiEndpoint(); const ret = await this._superAgentSvc.query(domain); + if (!ret) { + this._logger.debug("query returned null", { + ...request, + origin: "DomainQueryService", + context: { + domain, + endpoint, + }, + }); + return createDefaultDomainQueryValue(domain); + } if (ret.error) { - if(ret.error.status === 404) { - this._logger.debug('serving default value', { + if (ret.error.status === 404) { + this._logger.debug("serving default value", { ...request, - origin: 'DomainQueryService', + origin: "DomainQueryService", context: { domain, - } + }, }); } else { - this._logger.error( - 'endpoint returned error', - { - ...request, - origin: 'DomainQueryService', - context: { - domain, - status: ret.error.status, - message: ret.error.message, - path: ret.error.path, - endpoint: configuration.domainsapi.endpoint, - } - } - ); + this._logger.error("endpoint returned error", { + ...request, + origin: "DomainQueryService", + context: { + domain, + status: ret.error.status, + message: ret.error.message, + path: ret.error.path, + endpoint: endpoint, + }, + }); //FIXME: might be necessary to assume blacklist if we get a 500? 
we need to pass a value that doesn't get cached } return createDefaultDomainQueryValue(domain); @@ -127,13 +159,13 @@ export class DomainQueryService implements IDomainQueryService { const payload = DomainQueryValueCodec.parse(json); return payload; } catch (e) { - this._logger.error('failed to deserialize response', { + this._logger.error("failed to deserialize response", { ...request, - origin: 'DomainQueryService', + origin: "DomainQueryService", context: { domain, text: ret.text, - } + }, }); return createDefaultDomainQueryValue(domain); } @@ -144,7 +176,7 @@ export class DomainQueryService implements IDomainQueryService { if (!domain) { return false; } - + const explode = domain.split("."); for (var i = 0; i < explode.length; i++) { @@ -160,10 +192,14 @@ export class DomainQueryService implements IDomainQueryService { /** * precondition: domain must eventually terminate with a canonical_name entry ending in .eth */ - checkLinkedDomains = async (request: IRequestContext, domain: string): Promise => { - const configuration = this._configurationService.get(); + checkLinkedDomains = async ( + request: IRequestContext, + domain: string, + ): Promise => { + const configuration = this._configurationService?.getDomainQueryConfig(); //cycle detection is inbuilt via max_hops, if max_hops is expected to be large then needs hashmap of visited domains - var tries = configuration.domainsapi.max_hops; + //if configuration is null, max hops doesn't matter + var tries = configuration?.getMaxHops() || 15; var found = false; if (!domain) { return null; @@ -182,40 +218,38 @@ export class DomainQueryService implements IDomainQueryService { tries = tries - 1; } while (tries > 0 && !found); if (!found) { - this._logger.error( - 'domain queried but no canonical_name link exists', - { - ...request, - origin: 'DomainQueryService', - context: { - domain, - search, - } - } - ); + this._logger.error("domain queried but no canonical_name link exists", { + ...request, + origin: "DomainQueryService", + context: { + domain, + search, + }, + }); } return search; }; } -@injectable() -export class TestDomainQuerySuperagentService implements IDomainQuerySuperagentService { +export class TestDomainQuerySuperagentService + implements IDomainQuerySuperagentService +{ blacklistMap: Map; canonicalNameMap: Map; public error = false; setBlacklist = (domain: string, blacklisted: boolean) => { this.blacklistMap.set(domain, blacklisted); - } + }; setCanonicalName = (domain: string, canonicalName: string) => { this.canonicalNameMap.set(domain, canonicalName); - } + }; constructor() { this.blacklistMap = new Map(); this.canonicalNameMap = new Map(); } query = async (domain: string) => { - if(this.error) { + if (this.error) { return { error: { status: 500, @@ -234,4 +268,4 @@ export class TestDomainQuerySuperagentService implements IDomainQuerySuperagentS }), } as any as Response; }; -} \ No newline at end of file +} diff --git a/packages/dweb-api-server/src/services/KuboApiService/index.ts b/packages/dweb-api-server/src/services/KuboApiService/index.ts new file mode 100644 index 0000000..af9ad22 --- /dev/null +++ b/packages/dweb-api-server/src/services/KuboApiService/index.ts @@ -0,0 +1,107 @@ +import { ILoggerService } from "dweb-api-types/dist/logger"; +import superagent from "superagent"; +import { IRequestContext } from "dweb-api-types/dist/request-context"; +import { normalizeUrlFragmentForIpfsSubdomainGateway } from "dweb-api-resolver/dist/resolver/utils"; +import { IConfigurationKubo } from "dweb-api-types/dist/config"; + 
+export class KuboApiService { + private loggerService: ILoggerService; + private configurationService: IConfigurationKubo; + private agent: superagent.SuperAgentStatic; + + constructor( + loggerService: ILoggerService, + configurationService: IConfigurationKubo, + ) { + this.loggerService = loggerService; + this.configurationService = configurationService; + this.agent = superagent; + const koboConfig = this.configurationService.getKuboConfiguration(); + const koboUrl = koboConfig.getKuboApiUrl(); + const logObject = { + trace_id: "UNKNOWN_TRACE_ID", + origin: "KuboApiService", + context: { + url: koboUrl, + }, + }; + if (!koboUrl) { + this.loggerService.info( + "Kubo API URL is not set, all requests will be ignored.", + logObject, + ); + } else { + this.loggerService.info(`Kubo API URL is set`, logObject); + } + } + + async resolveIpnsName( + request: IRequestContext, + name: string, + ): Promise { + try { + const koboConfig = this.configurationService.getKuboConfiguration(); + const url = koboConfig.getKuboApiUrl(); + const auth = koboConfig.getKuboAuth(); + const timeout = koboConfig.getKuboTimeoutMs(); + if (!url) { + this.loggerService.debug("Superagent is not initialized", { + ...request, + origin: "KuboApiService", + context: { + name, + }, + }); + return null; + } + var superagentRequest = this.agent( + "POST", + `${url}api/v0/name/resolve`, + ).query({ + arg: normalizeUrlFragmentForIpfsSubdomainGateway( + name.split("ipns://")[1], + ), + recursive: true, + nocache: true, + "dht-record-count": 16, + "dht-timeout": "1m0s", + stream: false, + }); + + if (auth) { + superagentRequest = superagentRequest.set( + "Authorization", + `Basic ${auth}`, + ); + } + + const response = await superagentRequest + .timeout(timeout || 5000) + .ok((_res) => true); + + if (response.error) { + this.loggerService.debug("Failed to resolve IPNS name", { + ...request, + origin: "KuboApiService", + context: { + name, + response: response, + }, + }); + return null; + } else { + return response.body?.Path || null; + } + } catch (error) { + this.loggerService.error("failed to statically resolve IPNS name", { + ...request, + origin: "KuboApiService", + context: { + name, + error: error, + }, + }); + return null; + } + } +} diff --git a/packages/dweb-api-server/src/test/TestCaseGenerator.ts b/packages/dweb-api-server/src/test/TestCaseGenerator.ts new file mode 100644 index 0000000..9a863b7 --- /dev/null +++ b/packages/dweb-api-server/src/test/TestCaseGenerator.ts @@ -0,0 +1,98 @@ +type PossibleTypes = boolean | string; + +type InputObject = { [key: string]: PossibleTypes[] }; + +export function cartesianProduct( + input: InputObject, +): Array<{ [x: string]: PossibleTypes }> { + const keys = Object.keys(input); + const result = keys.reduce( + (product, key) => { + const newProduct: any[] = []; + for (const obj of product) { + for (const value of input[key]) { + newProduct.push({ ...obj, [key]: value }); + } + } + return newProduct; + }, + [{}], + ); + return result; +} + +function excludeProperties( + obj: T, + keys: K[], +): Omit { + let result = { ...obj }; + keys.forEach((key) => delete result[key]); + return result; +} + +export class TestRunner { + _testCases: (T & { options: O })[]; + runners: { name: string; run: (thisvar: Mocha.Suite) => () => void }[] = []; + + constructor(testCases: (T & { options: O })[]) { + this._testCases = testCases; + } + + registerTests( + description: string, + inputToLog: K[], + optionsToExclude: X[], + filterFunction: (testCase: T & { options: Omit }) => boolean, + func: 
(testCase: T & { options: Omit }) => Promise, + ): void { + const cases = this._testCases + .map((testCase: T & { options: O }) => { + const options = JSON.parse( + JSON.stringify(testCase.options), + ) as typeof testCase.options; + const newOptions = excludeProperties(options, optionsToExclude); + const ret: T & { options: Omit } = { + ...testCase, + options: newOptions, + }; + return ret; + }) + .filter( + (testCase, index, self) => + index === + self.findIndex((t) => JSON.stringify(t) === JSON.stringify(testCase)), + ) + .flatMap((x) => x) + .filter(filterFunction); + + const runner = cases.map((testCase) => { + const nameBuilder: string[] = []; + Object.keys(testCase).forEach((key) => { + if (inputToLog.includes(key as K)) { + nameBuilder.push( + `${key}: ${JSON.stringify((testCase as unknown as any)[key])}`, + ); + } + }); + Object.keys(testCase.options).forEach((key) => { + nameBuilder.push( + `${key}: ${JSON.stringify((testCase.options as unknown as any)[key])}`, + ); + }); + const name = description + " (" + nameBuilder.join(", ") + ")"; + return { + name, + run: (thisvar: Mocha.Suite) => func.bind(thisvar, testCase), + }; + }); + this.runners.push(...runner); + } + + runTests(thisvar: Mocha.Suite): void { + let runner = this.runners.shift(); + while (runner) { + it(runner.name, runner.run(thisvar)); + runner = this.runners.shift(); + } + } +} diff --git a/packages/dweb-api-server/src/test/TestResolverService.ts b/packages/dweb-api-server/src/test/TestResolverService.ts new file mode 100644 index 0000000..8a74075 --- /dev/null +++ b/packages/dweb-api-server/src/test/TestResolverService.ts @@ -0,0 +1,60 @@ +import { INameService } from "dweb-api-types/dist/name-service.js"; +import { IArweaveResolver } from "dweb-api-types/dist/arweave.js"; +import { IRequestContext } from "dweb-api-types/dist/request-context.js"; + +export class TestResolverService implements INameService, IArweaveResolver { + mappings = new Map< + string, + | string + | null + | { + error: true; + reason: string; + } + >(); + //TODO: this is a hack, scripts/dump_test_cases.ts should run arweave cases through resolveArweave as an extra parameter + resolveArweave: ( + request: IRequestContext, + tx_id: string, + ens_name: string, + ) => Promise = async ( + request: IRequestContext, + tx_id: string, + ens_name: string, + ) => { + const res = this.mappings.get(tx_id); + if (res === undefined) { + throw new Error(`TestResolverService: no mapping for ${ens_name}`); + } + if (typeof res === "string") { + return res.startsWith("arweave://") + ? 
res.substring("arweave://".length) + : res; + } + if (!res || res.error) { + throw new Error(res?.reason); + } + throw new Error("TestResolverService: invalid mapping"); + }; + + getContentHash( + _request: IRequestContext, + name: string, + ): Promise { + const res = this.mappings.get(name); + if (res === undefined) { + throw new Error(`TestResolverService: no mapping for ${name}`); + } + if (!res) { + return new Promise((resolve) => resolve(null)); + } + if (typeof res === "string") { + return new Promise((resolve) => resolve(res)); + } + throw new Error(res.reason); + } + + set(name: string, value: string | null | { error: true; reason: string }) { + this.mappings.set(name, value); + } +} diff --git a/src/test/cases.json b/packages/dweb-api-server/src/test/cases.json similarity index 100% rename from src/test/cases.json rename to packages/dweb-api-server/src/test/cases.json diff --git a/packages/dweb-api-server/src/test/integration.spec.ts b/packages/dweb-api-server/src/test/integration.spec.ts new file mode 100644 index 0000000..413774b --- /dev/null +++ b/packages/dweb-api-server/src/test/integration.spec.ts @@ -0,0 +1,1149 @@ +import { describe } from "mocha"; +import { expect } from "chai"; +// @ts-ignore //bug with parsing the type assertion +import cases from "./cases.json" with { type: "json" }; +import { Server as ProxyServer } from "../server/index"; +import { + RequestMethod, + RequestOptions, + createRequest, + createResponse, +} from "node-mocks-http"; +import { TestRunner, cartesianProduct } from "./TestCaseGenerator"; +import EventEmitter from "events"; +import { Request, Response } from "express"; +import { ParamsDictionary } from "express-serve-static-core"; +import { ParsedQs } from "qs"; +import { IRequestContext } from "dweb-api-types/dist/request-context.js"; +import { TestResolverService } from "./TestResolverService"; +import { createApplicationConfigurationBindingsManager } from "../dependencies/services.js"; +import { TestConfigurationService } from "../configuration"; +import { + IDomainQueryService, + TestDomainQuerySuperagentService, +} from "../services/DomainsQueryService"; +import { EnvironmentConfiguration } from "../dependencies/BindingsManager"; +import { IDnsQuery } from "../dnsquery"; +import { IDomainRateLimitService } from "../services/DomainRateLimit"; +import { IEnsResolverService } from "dweb-api-types/dist/ens-resolver"; +import { getPeerId } from "dweb-api-resolver/dist/resolver/index"; +import { normalizeUrlFragmentForIpfsSubdomainGateway } from "dweb-api-resolver/dist/resolver/utils"; +import { IHostnameSubstitutionService } from "dweb-api-resolver/dist/HostnameSubstitutionService/index"; +import { + TestLaggyRedisClientProxy, + TestRedisClient, +} from "dweb-api-cache/dist"; +import { TestLoggerService } from "dweb-api-logger/dist/index"; + +type HarnessType = { + configurationService: TestConfigurationService; + redisClient: TestRedisClient; + hostnameSubstitionService: IHostnameSubstitutionService; + testEnsService: TestResolverService; + web3NameSdkService: TestResolverService; + testArweaveResolverService: TestResolverService; + testDomainQuerySuperagentService: TestDomainQuerySuperagentService; + domainQueryService: IDomainQueryService; + testLoggerService: TestLoggerService; + dnsQueryService: IDnsQuery; + domainRateLimit: IDomainRateLimitService; + ensResolverService: IEnsResolverService; +}; + +let buildAppContainer = (): HarnessType => { + const services = createApplicationConfigurationBindingsManager(); + return { + 
configurationService: services.configuration.getBinding( + EnvironmentConfiguration.Development, + ) as TestConfigurationService, + dnsQueryService: services.dnsQuery.getBinding( + EnvironmentConfiguration.Development, + ), + domainRateLimit: services.domainRateLimit.getBinding( + EnvironmentConfiguration.Development, + ), + hostnameSubstitionService: services.hostnameSubstitution.getBinding( + EnvironmentConfiguration.Development, + ), + testEnsService: services.ensService.getBinding( + EnvironmentConfiguration.Development, + ) as TestResolverService, + web3NameSdkService: services.web3NameSdk.getBinding( + EnvironmentConfiguration.Development, + ) as TestResolverService, + testArweaveResolverService: services.arweaveResolver.getBinding( + EnvironmentConfiguration.Development, + ) as TestResolverService, + testDomainQuerySuperagentService: services.domainQuerySuperagent.getBinding( + EnvironmentConfiguration.Development, + ) as TestDomainQuerySuperagentService, + domainQueryService: services.domainQuery.getBinding( + EnvironmentConfiguration.Development, + ), + testLoggerService: services.logger.getBinding( + EnvironmentConfiguration.Development, + ) as TestLoggerService, + redisClient: services.redisClient.getBinding( + EnvironmentConfiguration.Development, + ) as TestRedisClient, + ensResolverService: services.ensResolver.getBinding( + EnvironmentConfiguration.Development, + ), + }; +}; + +type TestCaseType = { + name: string; + type: "ipfs" | "ipns" | "arweave" | "swarm" | "none"; + contentHash?: string; + additionalInfo: Partial<{ + arweave: { + result: string; + query: string; + subdomain_sandbox_id: string; + }; + }>; +}; + +interface Options { + subdomainSupport: boolean; + ensSocialsRedirect: boolean; + blacklisted: boolean | "throws"; + dohQueryType: "A" | "AAAA" | "CNAME" | "TXT"; + ensError: false | "throws"; + redisIsLaggy: boolean; + ignoreTotalityError?: true; +} + +const populateDefaultOptions = (options: Partial): Options => { + return { + subdomainSupport: true, + ensSocialsRedirect: true, + blacklisted: false, + dohQueryType: "A", + ensError: false, + redisIsLaggy: false, + ...options, + }; +}; + +const possibleOptions: Options[] = cartesianProduct({ + subdomainSupport: [true, false], + ensSocialsRedirect: [true, false], + blacklisted: [true, false, "throws"], + dohQueryType: ["A", "AAAA", "CNAME", "TXT"], + ensError: [false, "throws"], + redisIsLaggy: [false, true], +}) as any as Options[]; +var testCases = (cases as TestCaseType[]) + .map((testCase) => { + return possibleOptions.map((options) => { + return { + ...testCase, + options, + }; + }); + }) + .flatMap((x) => x); +const gen = new TestRunner(testCases); + +type HarnessProxyServerPayloadType = + | { + proxyServer: ProxyServer; + } + | { + caddyServer: ProxyServer; + } + | { + dohServerGetRequest: ProxyServer; + }; + +function isProxyServerPayloadType( + payload: any, +): payload is { proxyServer: ProxyServer } { + return payload.proxyServer !== undefined; +} + +function isCaddyServerPayloadType( + payload: any, +): payload is { caddyServer: ProxyServer } { + return payload.caddyServer !== undefined; +} + +function isDohServerGetPayloadType( + payload: any, +): payload is { dohServer: ProxyServer } { + return payload.dohServerGetRequest !== undefined; +} + +type HarnessPayloadType = HarnessProxyServerPayloadType; + +const harness = + (harnessInput: HarnessType) => + (payload: HarnessPayloadType) => + async (v: TestCaseType & { options: Options }) => { + var { contentHash, additionalInfo, options } = v; + + if 
(options.redisIsLaggy) { + harnessInput.redisClient.setProxy( + new TestLaggyRedisClientProxy(harnessInput.configurationService), + ); + } + + harnessInput.configurationService.set((conf) => { + conf.ipfs.subdomainSupport = options.subdomainSupport; + conf.ens.socialsEndpointEnabled = options.ensSocialsRedirect; + }); + /* + the distinction between these two variables is important: + - nameResolvedToEnsName is the name that the query logic will (should) see + - nameFromHostMayReferToSubdomainOfHostedProxyAddress is the name that the client will send to the server + -- this means that it CAN be a .eth domain, or it can be a proxied domain such as "vitalik.eth.limo" + -- the underlying assumption is that the server should be agnostic to the difference between these two + -- the server should only care about the underlying ENS name + -- if there's an underlying bug in the public facing interfaces of the server, that bug will end up passing the wrong name to either the query services or otherwise not respect the actual .eth ENS name + + for most of the test cases, nameResolvedToEnsName === nameFromHostMayReferToSubdomainOfHostedProxyAddress + */ + + const nameResolvedToEnsName = + harnessInput.hostnameSubstitionService.substituteHostname(v.name); + const nameFromHostMayReferToSubdomainOfHostedProxyAddress = v.name; + + var testEnsEnsServiceExpectedValue: + | string + | null + | { error: true; reason: string } + | undefined = contentHash; + if (options.ensError) { + testEnsEnsServiceExpectedValue = { + error: !!options.ensError, + reason: "test", + }; + } + + const resolvers = [ + harnessInput.testEnsService, + harnessInput.web3NameSdkService, + ]; + + var theRealTestResolverService: TestResolverService; + + if (nameResolvedToEnsName.endsWith("eth")) { + theRealTestResolverService = harnessInput.testEnsService; + } else if (nameResolvedToEnsName.endsWith("gno")) { + theRealTestResolverService = harnessInput.web3NameSdkService; + } else if (options?.ignoreTotalityError) { + theRealTestResolverService = harnessInput.testEnsService; + } else { + throw "Test case non-totality error"; + } + + if (testEnsEnsServiceExpectedValue !== undefined) { + theRealTestResolverService.set( + nameResolvedToEnsName, + testEnsEnsServiceExpectedValue, + ); + } + + //poison the other resolvers to ensure our factory selects the correct one + resolvers + .filter((resolver) => resolver !== theRealTestResolverService) + .forEach((resolver) => { + if (testEnsEnsServiceExpectedValue === undefined) { + resolver.set(nameResolvedToEnsName, "ASDFASDDFASDHDAHD bad value"); + } else { + //implicit poisoning, undefined is the default + } + }); + + if (additionalInfo.arweave) { + harnessInput.testArweaveResolverService.set( + additionalInfo.arweave.query, + additionalInfo.arweave.result, + ); + } + if (options.blacklisted) { + if (options.blacklisted === "throws") { + //if the service errors, we want to be unavailable + harnessInput.testDomainQuerySuperagentService.error = true; + } else { + harnessInput.testDomainQuerySuperagentService.setBlacklist( + nameResolvedToEnsName, + true, + ); + } + } + const request = createRequestEnsureTotality( + payload, + nameFromHostMayReferToSubdomainOfHostedProxyAddress, + options, + ); + const req = createRequest(request); + const res = createResponse({ + eventEmitter: EventEmitter, + }); + var busyWaiting = true; + res.on("end", () => { + busyWaiting = false; + }); + await callPayloadEnsureTotality(payload, req, res); + while (busyWaiting) { + await new Promise((resolve) => setTimeout(resolve, 
10)); + } + const _result = res._getData(); + if (!res._isEndCalled()) { + throw "Response not ended"; + } + const content_location = res.getHeader("x-content-location"); + const content_path = res.getHeader("x-content-path"); + const content_storage_type = res.getHeader("x-content-storage-type"); + + return { + _result, + res, + content_location, + content_path, + content_storage_type, + }; + }; + +describe("Proxy API Integration Tests", function () { + var harnessInput: HarnessType; + var server: ProxyServer; + var commonSetup: any; //not even the language server can figure out what this is + + beforeEach(() => { + let r = buildAppContainer(); + server = new ProxyServer( + r.configurationService, + r.testLoggerService, + r.domainQueryService, + r.ensResolverService, + r.testArweaveResolverService, + r.dnsQueryService, + r.domainRateLimit, + r.hostnameSubstitionService, + ); + harnessInput = r; + commonSetup = harness(harnessInput)({ proxyServer: server }); + }); + + afterEach(() => { + harnessInput = null as any; + server = null as any; + commonSetup = null as any; + }); + + gen.registerTests( + "normal blacklist behavior", + ["name", "type"], + ["ensSocialsRedirect", "dohQueryType"], + function (testCase) { + return ( + testCase.options.blacklisted === true || + testCase.options.blacklisted === "throws" + ); + }, + async function (testCase) { + var { options } = testCase; + + const { res } = await commonSetup(testCase); + + if (options.blacklisted === "throws") { + var expectedResponseCode = 200; + if (options.ensError) { + expectedResponseCode = 500; + } else if (testCase.type === "none" && !options.ensSocialsRedirect) { + expectedResponseCode = 404; + } + expect(res.statusCode).to.be.equal(expectedResponseCode); + } else { + expect(res.statusCode).to.be.equal(451); + } + }, + ); + + gen.registerTests( + "subdomain blacklist behavior", + ["name", "type"], + ["ensSocialsRedirect", "dohQueryType"], + function (testCase) { + return ( + testCase.options.blacklisted === true || + testCase.options.blacklisted === "throws" + ); + }, + async function (originalTestCase) { + const subdomains = ["asdf", "www", "a.b.c", "asdf.gsdh"]; + for (const subdomain of subdomains) { + var testCase = { ...originalTestCase }; + testCase.name = subdomain + "." 
+ originalTestCase.name; + if (originalTestCase.options.blacklisted === "throws") { + //if the service errors, we want to be unavailable + harnessInput.testDomainQuerySuperagentService.error = true; + } else { + harnessInput.testDomainQuerySuperagentService.setBlacklist( + harnessInput.hostnameSubstitionService.substituteHostname( + originalTestCase.name, + ), + true, + ); + } + const fudge = JSON.parse(JSON.stringify(testCase)); + fudge.options.blacklisted = false; //we don't want the subdomain blacklisted, just the original domain + + const { res } = await commonSetup(fudge); + + var { options } = testCase; + if (options.blacklisted === "throws") { + var expectedResponseCode = 200; + if (options.ensError) { + expectedResponseCode = 500; + } else if (testCase.type === "none" && !options.ensSocialsRedirect) { + expectedResponseCode = 404; + } + expect(res.statusCode).to.be.equal( + expectedResponseCode, + `subdomain: ${subdomain}`, + ); + } else { + expect(res.statusCode).to.be.gt(399, `subdomain: ${subdomain}`); + } + } + }, + ); + + gen.registerTests( + "x-content-location and x-content-path", + ["name", "type"], + ["ensSocialsRedirect", "dohQueryType"], + function (testCase) { + return testCase.type === "ipfs" || testCase.type === "ipns"; + }, + async function (testCase) { + var { type, name, contentHash, options } = testCase; + const { content_location, content_path, content_storage_type, res } = + await commonSetup(testCase); + if (options.blacklisted === true) { + expect(res.statusCode).to.be.equal(451); + expect(content_path).to.be.undefined; + expect(content_location).to.be.undefined; + expect(content_storage_type).to.be.undefined; + return; + } + + if (options.ensError) { + expect(res.statusCode).to.be.equal(500); + expect(content_path).to.be.undefined; + expect(content_location).to.be.undefined; + expect(content_storage_type).to.be.undefined; + return; + } + + const request = { + trace_id: "TEST_TRACE_ID", + }; + + contentHash = recalculateIpnsContentHash( + request, + type, + contentHash, + harnessInput, + name, + ); + if (options.subdomainSupport) { + expect(content_path).to.be.equal(`/`); + let fragment = contentHash?.substring(7); + //see the en.wikipedia-on-ipfs.org testcase + if (type === "ipns") { + fragment = + fragment && normalizeUrlFragmentForIpfsSubdomainGateway(fragment); + } + expect(content_location).to.be.equal(`${fragment}.${type}.ipfs`); + } else { + expect(content_path).to.be.equal( + `/${type}/${contentHash?.substring(7)}/`, + ); + expect(content_location).to.be.equal("ipfs"); + } + expect(content_storage_type).to.be.equal( + getCodecFromType(testCase.type as any), + ); + }, + ); + gen.registerTests( + "x-content-location and x-content-path", + ["name", "type"], + ["ensSocialsRedirect", "subdomainSupport", "dohQueryType"], + function (testCase) { + return testCase.type === "arweave"; + }, + async function (testCase) { + var { contentHash, additionalInfo, options } = testCase; + const { content_location, content_path, content_storage_type, res } = + await commonSetup(testCase); + if (options.blacklisted === true) { + expect(res.statusCode).to.be.equal(451); + expect(content_path).to.be.undefined; + expect(content_location).to.be.undefined; + expect(content_storage_type).to.be.undefined; + return; + } + + if (options.ensError) { + expect(res.statusCode).to.be.equal(500); + expect(content_path).to.be.undefined; + expect(content_location).to.be.undefined; + expect(content_storage_type).to.be.undefined; + return; + } + + expect(res.statusCode).to.be.equal(200); + 
const ar_id = contentHash?.substring("arweave://".length); + expect(content_path).to.be.equal("/" + ar_id + "/"); + expect(additionalInfo.arweave?.subdomain_sandbox_id).to.not.be.undefined; //this would be a bad test case if it was + expect(content_location).to.be.equal( + `${additionalInfo.arweave?.subdomain_sandbox_id}.arweave`, + ); + expect(content_storage_type).to.be.equal( + getCodecFromType(testCase.type as any), + ); + }, + ); + gen.registerTests( + "x-content-location and x-content-path", + ["name", "type"], + ["ensSocialsRedirect", "subdomainSupport", "dohQueryType"], + function (testCase) { + return testCase.type === "swarm"; + }, + async function (testCase) { + var { contentHash, options } = testCase; + const { content_location, content_path, content_storage_type, res } = + await commonSetup(testCase); + if (options.blacklisted === true) { + expect(res.statusCode).to.be.equal(451); + expect(content_path).to.be.undefined; + expect(content_location).to.be.undefined; + expect(content_storage_type).to.be.undefined; + return; + } + + if (options.ensError) { + expect(res.statusCode).to.be.equal(500); + expect(content_path).to.be.undefined; + expect(content_location).to.be.undefined; + expect(content_storage_type).to.be.undefined; + return; + } + + expect(res.statusCode).to.be.equal(200); + expect(content_path).to.be.equal( + "/bzz/" + contentHash?.substring("bzz://".length) + "/", + ); + expect(content_location).to.be.equal("swarm"); + expect(content_storage_type).to.be.equal( + getCodecFromType(testCase.type as any), + ); + }, + ); + gen.registerTests( + "x-content-location and x-content-path", + ["name", "type"], + ["subdomainSupport", "blacklisted", "dohQueryType"], + function (testCase) { + return testCase.type === "none"; + }, + async function (testCase) { + var { name, options } = testCase; + const { content_location, content_path, content_storage_type, res } = + await commonSetup(testCase); + + if (options.ensError) { + expect(res.statusCode).to.be.equal(500); + expect(content_path).to.be.undefined; + expect(content_location).to.be.undefined; + expect(content_storage_type).to.be.undefined; + return; + } + + if (options.ensSocialsRedirect) { + expect(res.statusCode).to.be.equal(200); + expect(content_path).to.be.equal(`/?name=${name}/`); + expect(content_location).to.be.equal("socials.com"); + } else { + expect(res.statusCode).to.be.equal(404); + expect(content_path).to.be.undefined; + expect(content_location).to.be.undefined; + } + + expect(content_storage_type).to.be.undefined; + }, + ); + + gen.runTests(this); + it("should handle IP addresses correctly", async () => { + const { res } = await commonSetup({ + name: "127.0.0.1", + type: "none", + additionalInfo: {}, + contentHash: null, + options: populateDefaultOptions({ + ignoreTotalityError: true, + }), + }); + expect(res.statusCode).to.be.equal(200); + }); + it("should handle IP addresses correctly", async () => { + const { res } = await commonSetup({ + name: "2607:f8b0:4009:804::200e", + type: "none", + additionalInfo: {}, + contentHash: null, + options: populateDefaultOptions({ + ignoreTotalityError: true, + }), + }); + expect(res.statusCode).to.be.equal(500); + }); + + it("should handle long IPFS ens names", async () => { + const { res } = await commonSetup({ + name: "vitalik.jsonapi.eth", + type: "ipfs", + additionalInfo: {}, + contentHash: + 
"ipfs://bagaaiaf4af5se33lei5hi4tvmuwce5djnvsseorcge3timzqgy4dknzzeiwceytmn5rwwir2eizdemjtg42denjcfqrgk4tdei5dalbcovzwk4rchj5seylemrzgk43tei5cemdymq4giqjwijddenrzgy2gcrrziq3wkrlehfstam2fguztimjviqztoykbhe3danbveiwce3tbnvsseorcozuxiylmnfvs4zlunarcyitcmfwgc3tdmurduirvgizc4nrsgq2teobcfqrha4tjmnsseorcgiydenzogm4tenzugirh27i", + options: populateDefaultOptions({ + subdomainSupport: true, + }), + }); + expect(res.header("x-content-location")).to.equal( + "vitalik-jsonapi-eth.ipns.ipfs", + ); + expect(res.header("x-content-path")).to.equal("/"); + expect(res.header("x-content-storage-type")).to.equal("ipns-ns"); + }); +}); + +describe("Caddy API Integration Tests", function () { + var harnessInput: HarnessType; + var server: ProxyServer; + var commonSetup: any; //not even the language server can figure out what this is + + beforeEach(() => { + let r = buildAppContainer(); + server = new ProxyServer( + r.configurationService, + r.testLoggerService, + r.domainQueryService, + r.ensResolverService, + r.testArweaveResolverService, + r.dnsQueryService, + r.domainRateLimit, + r.hostnameSubstitionService, + ); + harnessInput = r; + commonSetup = harness(harnessInput)({ caddyServer: server }); + }); + + afterEach(() => { + harnessInput = null as any; + server = null as any; + commonSetup = null as any; + }); + + gen.registerTests( + "normal blacklist behavior", + ["name", "type"], + ["ensSocialsRedirect", "dohQueryType"], + function (testCase) { + return ( + testCase.options.blacklisted === true || + testCase.options.blacklisted === "throws" + ); + }, + async function (testCase) { + var { options } = testCase; + + const { res } = await commonSetup(testCase); + + if (options.blacklisted === "throws") { + var expectedResponseCode = 200; + if (options.ensError) { + expectedResponseCode = 500; + } else if (testCase.type === "none" && !options.ensSocialsRedirect) { + expectedResponseCode = 404; + } + expect(res.statusCode).to.be.equal(expectedResponseCode); + } else { + expect(res.statusCode).to.be.equal(451); + } + }, + ); + + gen.registerTests( + "subdomain blacklist behavior", + ["name", "type"], + ["ensSocialsRedirect", "dohQueryType", "subdomainSupport"], + function (testCase) { + return ( + testCase.options.blacklisted === true || + (testCase.options.blacklisted === "throws" && testCase.type !== "none") + ); + }, + async function (originalTestCase) { + const subdomains = ["asdf", "www", "a.b.c", "asdf.gsdh"]; + for (const subdomain of subdomains) { + var testCase = JSON.parse(JSON.stringify(originalTestCase)); + testCase.name = subdomain + "." 
+ originalTestCase.name; + if (originalTestCase.options.blacklisted === "throws") { + //if the service errors, we want to be unavailable + harnessInput.testDomainQuerySuperagentService.error = true; + } else { + harnessInput.testDomainQuerySuperagentService.setBlacklist( + harnessInput.hostnameSubstitionService.substituteHostname( + originalTestCase.name, + ), + true, + ); + } + const fudge = JSON.parse(JSON.stringify(testCase)); + + fudge.options.blacklisted = false; //we don't want the subdomain blacklisted, just the original domain + + const { res } = await commonSetup(fudge); + + var { options } = testCase; + if (options.blacklisted === "throws" && options.ensError !== "throws") { + expect(res.statusCode).to.be.equal(200); + return; + } + + expect(res.statusCode).to.be.greaterThan(399); + } + }, + ); + + gen.registerTests( + "permutation", + ["name", "type"], + ["subdomainSupport", "dohQueryType"], + function (testCase) { + return ( + testCase.type === "ipfs" || + testCase.type === "ipns" || + testCase.type === "arweave" || + testCase.type === "swarm" + ); + }, + async function (testCase) { + var { options } = testCase; + const { res } = await commonSetup(testCase); + if (options.ensError && options.blacklisted !== true) { + expect(res.statusCode).to.be.equal(500); + return; + } + if (options.blacklisted) { + if (options.blacklisted === "throws") { + expect(res.statusCode).to.be.equal(200); + } else { + expect(res.statusCode).to.be.equal(451); + } + return; + } + + if (options.ensError) { + expect(res.statusCode).to.be.equal(500); + return; + } + + expect(res.statusCode).to.be.equal(200); + }, + ); + + gen.registerTests( + "permutation", + ["name", "type"], + ["subdomainSupport", "dohQueryType"], + function (testCase) { + return testCase.type === "none"; + }, + async function (testCase) { + var { options } = testCase; + const { res } = await commonSetup(testCase); + if (options.ensError && options.blacklisted !== true) { + expect(res.statusCode).to.be.equal(500); + return; + } + if (options.blacklisted === true) { + expect(res.statusCode).to.be.equal(451); + return; + } + + if (options.ensError) { + expect(res.statusCode).to.be.equal(500); + return; + } + + if (options.ensSocialsRedirect) { + expect(res.statusCode).to.be.equal(200); + } else { + expect(res.statusCode).to.be.equal(404); + } + }, + ); + + gen.runTests(this); + it("should handle IP addresses correctly", async () => { + const { res } = await commonSetup({ + name: "127.0.0.1", + type: "none", + additionalInfo: {}, + contentHash: null, + options: populateDefaultOptions({ + ignoreTotalityError: true, + }), + }); + expect(res.statusCode).to.be.equal(422); + }); + it("should handle IP addresses correctly", async () => { + const { res } = await commonSetup({ + name: "2607:f8b0:4009:804::200e", + type: "none", + additionalInfo: {}, + contentHash: null, + options: populateDefaultOptions({ + ignoreTotalityError: true, + }), + }); + expect(res.statusCode).to.be.equal(422); + }); +}); + +describe("DoH GET API Integration Tests", function () { + var harnessInput: HarnessType; + var server: ProxyServer; + var commonSetup: any; //not even the language server can figure out what this is + + beforeEach(() => { + let r = buildAppContainer(); + server = new ProxyServer( + r.configurationService, + r.testLoggerService, + r.domainQueryService, + r.ensResolverService, + r.testArweaveResolverService, + r.dnsQueryService, + r.domainRateLimit, + r.hostnameSubstitionService, + ); + harnessInput = r; + commonSetup = harness(harnessInput)({ 
dohServerGetRequest: server }); + }); + + afterEach(() => { + harnessInput = null as any; + server = null as any; + commonSetup = null as any; + }); + + function handleBlacklistBehaviorTest( + testCase: TestCaseType & { options: Options }, + res: Response, + result: any, + ) { + if (testCase.options.blacklisted === "throws") { + expect(res.statusCode).to.be.equal(200); + } else { + if (res.statusCode === 451) { + //FIXME: this 451 clause shouldn't exist, this is just to describe current behavior + expect(res.statusCode).to.be.equal(451); + return; + } else { + expect(res.statusCode).to.be.equal(200); + const payload = JSON.parse(result as string); + expect(Math.abs(payload.Status)).to.be.equal(0); + expect(payload.Answer).to.be.be.instanceOf(Array); + expect(payload.Answer).to.be.empty; + } + } + } + + gen.registerTests( + "normal blacklist behavior", + ["name", "type"], + ["ensSocialsRedirect", "ensError", "subdomainSupport"], + function (testCase) { + return ( + testCase.options.blacklisted === true || + testCase.options.blacklisted === "throws" + ); + }, + async function (testCase) { + const { _result, res } = await commonSetup(testCase); + handleBlacklistBehaviorTest(testCase, res, _result); + }, + ); + + gen.registerTests( + "subdomain blacklist behavior", + ["name", "type"], + ["ensSocialsRedirect", "ensError", "subdomainSupport"], + function (testCase) { + return ( + testCase.options.blacklisted === true || + testCase.options.blacklisted === "throws" + ); + }, + async function (originalTestCase) { + const subdomains = ["asdf", "www", "a.b.c", "asdf.gsdh"]; + for (const subdomain of subdomains) { + var testCase = { ...originalTestCase }; + testCase.name = subdomain + "." + originalTestCase.name; + if (originalTestCase.options.blacklisted === "throws") { + //if the service errors, we want to be unavailable + harnessInput.testDomainQuerySuperagentService.error = true; + } else { + harnessInput.testDomainQuerySuperagentService.setBlacklist( + originalTestCase.name, + true, + ); + } + const fudge = { ...testCase }; + fudge.options = { ...testCase.options, blacklisted: false }; //we don't want the subdomain blacklisted, just the original domain + + const { _result, res } = await commonSetup(fudge); + handleBlacklistBehaviorTest(testCase, res, _result); + } + }, + ); + + gen.registerTests( + "permutation", + ["name", "type"], + ["ensSocialsRedirect", "subdomainSupport"], + function () { + return true; + }, + async function (testCase) { + var { options, type } = testCase; + const { res, _result } = await commonSetup(testCase); + const request = { + trace_id: "TEST_TRACE_ID", + }; + const contentHash = recalculateIpnsContentHash( + request, + type, + testCase.contentHash, + harnessInput, + testCase.name, + ); + + /* + DoH should *not* respect the server being hosted at an endpoint, it is for raw ENS queries only + */ + if (testCase.name.endsWith("local")) { + expect(res.statusCode).to.be.equal(200); + const ret = JSON.parse(_result); + expect(Math.abs(ret.Status)).to.be.equal(0); + expect(ret.Answer).to.be.be.instanceOf(Array); + expect(ret.Answer).to.be.empty; + return; + } + + if (options.blacklisted === true) { + expect(res.statusCode).to.be.equal(451); + return; + } + + if (options.ensError && options.dohQueryType === "TXT") { + expect(res.statusCode).to.be.equal(200); + const ret = JSON.parse(_result); + expect(Math.abs(ret.Status)).to.be.equal(2); + return; + } + + const result = JSON.parse(_result as string); + expect(res.statusCode).to.be.equal(200); + 
expect(Math.abs(result.Status)).to.be.equal(0); + expect(result.Answer).to.be.be.instanceOf(Array); + if (options.dohQueryType === "TXT") { + if (type === "none") { + expect(result.Answer).to.be.empty; + return; + } + expect(result.Answer).to.not.be.empty; + const the_result = result.Answer[0]; + expect(the_result.type).to.be.equal(16); + expect(the_result.name).to.be.equal( + harnessInput.hostnameSubstitionService.substituteHostname( + testCase.name, + ), + ); + const prefix = + type === "arweave" ? `ar://` : `/${getDnslinkPrefixFromType(type)}/`; + const dnslink_string = `dnslink=${prefix}${contentHash?.substring(contentHash.indexOf("://") + 3)}`; + expect(the_result.data).to.be.equal(dnslink_string); + //if the default test configuration service was changed, this should be too + expect(the_result.ttl).to.be.equal(69); + } else { + expect(result.Answer).to.be.empty; + } + }, + ); + + gen.runTests(this); + it("should handle IP addresses correctly", async () => { + const { res } = await commonSetup({ + name: "127.0.0.1", + type: "none", + additionalInfo: {}, + contentHash: null, + options: populateDefaultOptions({ + ignoreTotalityError: true, + }), + }); + expect(res.statusCode).to.be.equal(200); + }); + + it("should handle IP addresses correctly", async () => { + const { res } = await commonSetup({ + name: "2607:f8b0:4009:804::200e", + type: "none", + additionalInfo: {}, + contentHash: null, + options: populateDefaultOptions({ + ignoreTotalityError: true, + }), + }); + expect(res.statusCode).to.be.equal(200); + }); + + it("should handle long IPFS ens names", async () => { + const { _result } = await commonSetup({ + name: "vitalik.jsonapi.eth", + type: "ipfs", + additionalInfo: {}, + contentHash: + "ipfs://bagaaiaf4af5se33lei5hi4tvmuwce5djnvsseorcge3timzqgy4dknzzeiwceytmn5rwwir2eizdemjtg42denjcfqrgk4tdei5dalbcovzwk4rchj5seylemrzgk43tei5cemdymq4giqjwijddenrzgy2gcrrziq3wkrlehfstam2fguztimjviqztoykbhe3danbveiwce3tbnvsseorcozuxiylmnfvs4zlunarcyitcmfwgc3tdmurduirvgizc4nrsgq2teobcfqrha4tjmnsseorcgiydenzogm4tenzugirh27i", + options: populateDefaultOptions({ + subdomainSupport: true, + dohQueryType: "TXT", + }), + }); + const result = JSON.parse(_result); + const reply = result.Answer[0]; + expect(reply.name).to.equal("vitalik.jsonapi.eth"); + expect(reply.data).to.equal( + "dnslink=/ipfs/bagaaiaf4af5se33lei5hi4tvmuwce5djnvsseorcge3timzqgy4dknzzeiwceytmn5rwwir2eizdemjtg42denjcfqrgk4tdei5dalbcovzwk4rchj5seylemrzgk43tei5cemdymq4giqjwijddenrzgy2gcrrziq3wkrlehfstam2fguztimjviqztoykbhe3danbveiwce3tbnvsseorcozuxiylmnfvs4zlunarcyitcmfwgc3tdmurduirvgizc4nrsgq2teobcfqrha4tjmnsseorcgiydenzogm4tenzugirh27i", + ); + expect(reply.type).to.equal(16); + }); +}); + +function getCodecFromType(type: "ipfs" | "ipns" | "arweave" | "swarm"): string { + if (type === "ipfs") { + return "ipfs-ns"; + } else if (type === "ipns") { + return "ipns-ns"; + } else if (type === "arweave") { + return "arweave-ns"; + } else if (type === "swarm") { + return "swarm"; + } + return type as never; +} + +function recalculateIpnsContentHash( + request: IRequestContext, + type: string, + contentHash: string | undefined, + harnessInput: HarnessType, + name: string, +) { + if (type === "ipns" && contentHash) { + const peerId = + getPeerId( + request, + harnessInput.testLoggerService, + contentHash.substring(7), + name, + ) || "THIS_SHOULD_NOT_BE_NULL"; + return "ipns://" + peerId; + } + return contentHash; +} + +function getDnslinkPrefixFromType( + type: "ipfs" | "ipns" | "arweave" | "swarm", +): string { + if (type === "ipfs") { + return 
"ipfs"; + } else if (type === "ipns") { + return "ipns"; + } else if (type === "arweave") { + return "ar"; + } else if (type === "swarm") { + return "bzz"; + } + return type as never; +} + +function createRequestEnsureTotality( + payload: HarnessProxyServerPayloadType, + name: string, + options: Options, +): RequestOptions { + if (isProxyServerPayloadType(payload)) { + return { + method: "GET" as RequestMethod, + url: "localhost", + headers: { + Host: name, + }, + }; + } else if (isCaddyServerPayloadType(payload)) { + return { + method: "GET" as RequestMethod, + url: `http://localhost`, + headers: { + Host: "localhost", + }, + query: { + domain: name, + }, + }; + } else if (isDohServerGetPayloadType(payload)) { + return { + method: "GET" as RequestMethod, + url: `http://localhost`, + headers: { + Host: "localhost", + }, + query: { + name, + type: options.dohQueryType, + }, + }; + } else { + return payload as never; + } +} + +async function callPayloadEnsureTotality( + payload: HarnessProxyServerPayloadType, + req: Request>, + res: Response>, +): Promise { + if (isProxyServerPayloadType(payload)) { + await payload.proxyServer.proxyServer(req, res); + } else if (isCaddyServerPayloadType(payload)) { + await payload.caddyServer.caddy(req, res); + } else if (isDohServerGetPayloadType(payload)) { + await payload.dohServerGetRequest._DnsQuery.dnsqueryGet(req, res); + } else { + return payload as never; + } +} diff --git a/packages/dweb-api-server/src/utils/index.ts b/packages/dweb-api-server/src/utils/index.ts new file mode 100644 index 0000000..a6ae90a --- /dev/null +++ b/packages/dweb-api-server/src/utils/index.ts @@ -0,0 +1,48 @@ +import { Request } from "express"; +import { punycodeDomainPartsToUnicode } from "./punycodeConverter"; +import { VALID_ENS_TLDS } from "../configuration"; +import { IHostnameSubstitutionService } from "dweb-api-resolver/dist/HostnameSubstitutionService/index"; + +export type GetDomainOfRequestFromGetReturnType = { + domain: string; + domain_without_suffix_substitutions: string; +} | null; + +export function getDomainOfRequestFromGet( + hostnameSubstitutionService: IHostnameSubstitutionService, + req: Request, + param = "domain", +): GetDomainOfRequestFromGetReturnType { + let domain = req.query[param]; + if (typeof domain !== "string") { + return null; + } + + domain = punycodeDomainPartsToUnicode(domain); + const domain_without_suffix_substitutions = domain; + domain = hostnameSubstitutionService.substituteHostname(domain); + + if (hostnameIsENSTLD(domain)) { + return { + domain, + domain_without_suffix_substitutions, + }; + } else { + return null; + } +} + +export function hostnameIsENSTLD(hostname: string) { + return ( + VALID_ENS_TLDS.find((tld) => hostname.endsWith("." + tld)) !== undefined + ); +} + +export function getTraceIdFromRequest(req: Request) { + const trace_id_header = "x-limo-id"; + const trace_id = + typeof req.headers[trace_id_header] === "string" + ? 
req.headers[trace_id_header] + : "UNDEFINED_TRACE_ID"; + return trace_id; +} diff --git a/src/utils/punycodeConverter/index.ts b/packages/dweb-api-server/src/utils/punycodeConverter/index.ts similarity index 100% rename from src/utils/punycodeConverter/index.ts rename to packages/dweb-api-server/src/utils/punycodeConverter/index.ts diff --git a/src/utils/punycodeConverter/punycode.d.ts b/packages/dweb-api-server/src/utils/punycodeConverter/punycode.d.ts similarity index 100% rename from src/utils/punycodeConverter/punycode.d.ts rename to packages/dweb-api-server/src/utils/punycodeConverter/punycode.d.ts diff --git a/packages/dweb-api-server/tsconfig.json b/packages/dweb-api-server/tsconfig.json new file mode 100644 index 0000000..9ed764a --- /dev/null +++ b/packages/dweb-api-server/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "../../tsconfig.base.json", + "exclude": [ ], + "include": [ + "./src/**/*.ts" + ], + "compilerOptions": { + "rootDir": "./src", + "outDir": "./dist", + "baseUrl": "./src", + "moduleResolution": "Bundler", + "module": "ES2022", + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "target": "ES2022", + "lib": ["ES2022"], + "skipLibCheck": true, + }, + "ts-node": { + "esm": true, + "experimentalSpecifierResolution": "node" + } +} \ No newline at end of file diff --git a/packages/dweb-api-serviceworker/.babelrc b/packages/dweb-api-serviceworker/.babelrc new file mode 100644 index 0000000..b57f614 --- /dev/null +++ b/packages/dweb-api-serviceworker/.babelrc @@ -0,0 +1,7 @@ +{ + "presets": [ + "@babel/preset-env", + "@babel/preset-typescript" + ] + } + \ No newline at end of file diff --git a/packages/dweb-api-serviceworker/package.json b/packages/dweb-api-serviceworker/package.json new file mode 100644 index 0000000..251c15c --- /dev/null +++ b/packages/dweb-api-serviceworker/package.json @@ -0,0 +1,45 @@ +{ + "name": "dweb-api-serviceworker", + "version": "1.0.0", + "description": "Proxy middleware for ENS and other on chain naming services", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "author": "eth.limo team", + "license": "MIT", + "keywords": [], + "scripts": { + "build": "rm -rf dist/** && webpack --config webpack.config.mjs", + "start": "http-server dist --cors", + "test": "mocha" + }, + "devDependencies": { + "@babel/core": "^7.25.2", + "@babel/preset-env": "^7.25.3", + "@babel/preset-typescript": "^7.24.7", + "babel-loader": "^9.1.3", + "html-inline-script-webpack-plugin": "^3.2.1", + "html-webpack-plugin": "^5.6.0", + "http-server": "^14.1.1", + "typescript": "^5.5.4", + "webpack": "^5.93.0", + "webpack-cli": "^5.1.4" + }, + "type": "module", + "dependencies": { + "@helia/http": "^1.0.10", + "@helia/verified-fetch": "^1.5.0", + "@sgtpooki/file-type": "^1.0.1", + "assert": "^2.1.0", + "browserify-zlib": "^0.2.0", + "dweb-api-logger": "file:../dweb-api-logger", + "dweb-api-resolver": "^1.0.0", + "ethers": "^6.13.2", + "https-browserify": "^1.0.0", + "os-browserify": "^0.3.0", + "path-browserify": "^1.0.1", + "stream-browserify": "^3.0.0", + "stream-http": "^3.2.0", + "url": "^0.11.4", + "util": "^0.12.5" + } +} diff --git a/packages/dweb-api-serviceworker/src/index.ts b/packages/dweb-api-serviceworker/src/index.ts new file mode 100644 index 0000000..38f1bfc --- /dev/null +++ b/packages/dweb-api-serviceworker/src/index.ts @@ -0,0 +1,83 @@ +import { createBrowserConfig, createServiceWorker, resolveUrl } from "lib.js"; + +(async () => { + if ("serviceWorker" in navigator) { + const swContainer = navigator.serviceWorker as 
ServiceWorkerContainer; + const controller = swContainer.controller; + if (!controller) { + await Promise.all( + (await swContainer.getRegistrations()).map((registration) => { + return registration.unregister().then((success) => { + if (success) { + console.log("Unregistered service worker", registration.scope); + } else { + console.error( + "Failed to unregister service worker", + registration.scope, + ); + } + }); + }), + ); + } + + const config = createBrowserConfig(); + const services = await createServiceWorker(config); + const location = window.location.toString(); + const shouldReload = await resolveUrl(location, config, services).then( + (x) => { + if ( + x?._tag === "FetchableByUrl" || + x?._tag === "FetchableByVerifiedFetch" + ) { + return true; + } else { + console.error("Failed to resolve URL", location); + return false; + } + }, + ); + + try { + const registration = await swContainer.register("/_limo_loader_sw.js", { + scope: "/", + }); + console.log("Service Worker registered with scope:", registration.scope); + + // Wait for the service worker to activate and control the page + if ( + registration.active || + registration.waiting || + registration.installing + ) { + await new Promise((resolve) => { + const onStateChange = (worker: ServiceWorker | null) => { + if (worker?.state === "activated") { + resolve(true); + } + }; + + if (registration.active) { + resolve(true); + } else if (registration.installing) { + registration.installing.addEventListener("statechange", (event) => + onStateChange(event.target as ServiceWorker), + ); + } else if (registration.waiting) { + registration.waiting.addEventListener("statechange", (event) => + onStateChange(event.target as ServiceWorker), + ); + } + }); + + if (!(await navigator.serviceWorker.ready).active) { + console.error("Service Worker failed to activate"); + } else if (shouldReload) { + window.location.reload(); + } + } + } catch (error) { + console.error("Service Worker registration failed:", error); + } + } +})(); diff --git a/packages/dweb-api-serviceworker/src/lib.ts b/packages/dweb-api-serviceworker/src/lib.ts new file mode 100644 index 0000000..a2c8d58 --- /dev/null +++ b/packages/dweb-api-serviceworker/src/lib.ts @@ -0,0 +1,388 @@ +import type { + IConfigHostnameSubstitution, + IConfigurationArweave, + IConfigurationEnsSocials, + IConfigurationEthereum, + IConfigurationGnosis, + IConfigurationIpfs, + IConfigurationLogger, + IConfigurationSwarm, +} from "dweb-api-types/dist/config.js"; +import { JsonLoggerService } from "dweb-api-logger/dist/jsonlogger.js"; +import { + EnsResolverService, + IEnsResolverServiceResolveEnsRet, +} from "dweb-api-resolver/dist/resolver/index.js"; +import { PassthroughCacheService } from "dweb-api-cache/dist/passthrough.js"; +import { NameServiceFactory } from "dweb-api-resolver/dist/nameservice/index.js"; +import { EnsService } from "dweb-api-resolver/dist/nameservice/EnsService.js"; +import { Web3NameSdkService } from "dweb-api-resolver/dist/nameservice/Web3NameSdkService.js"; +import { ArweaveResolver } from "dweb-api-resolver/dist/resolver/arweave.js"; +import { HostnameSubstitutionService } from "dweb-api-resolver/dist/HostnameSubstitutionService/index.js"; +import { + recordToProxyRecord, + ensureTrailingSlash, + trimExtraneousTrailingSlashes, +} from "dweb-api-resolver/dist/resolver/utils.js"; +import { recordNamespaceToUrlHandlerMap } from "dweb-api-resolver/dist/resolver/const.js"; +import { ContentTypeParser } from "@helia/verified-fetch"; +import { fileTypeFromBuffer } from 
"@sgtpooki/file-type"; + +export type ServiceWorkerConfig = IConfigurationLogger & + IConfigurationGnosis & + IConfigurationEthereum & + IConfigHostnameSubstitution & + IConfigurationIpfs & + IConfigurationArweave & + IConfigurationSwarm & + IConfigurationEnsSocials & { + verifiedFetch: boolean; + }; + +export const createServiceWorker = async (config: ServiceWorkerConfig) => { + const logger = new JsonLoggerService(); + const cache = new PassthroughCacheService(); + const ensService = new EnsService(config, logger); + const web3NameSdk = new Web3NameSdkService(config, logger); + const factory = new NameServiceFactory(logger, ensService, web3NameSdk); + const arweave = new ArweaveResolver(logger); + const resolver = new EnsResolverService( + logger, + cache, + arweave, + null, + factory, + ); + const hostnameSubstitutionService = new HostnameSubstitutionService( + config, + logger, + ); + + return { + resolver, + hostnameSubstitutionService, + logger, + }; +}; + +export type ServiceWorkerServices = Awaited< + ReturnType +>; + +export const createConfig = ( + urlOfHost: URL | null, + ipfsBackend: URL, + arweaveBackend: URL, +): ServiceWorkerConfig => { + const host = urlOfHost?.hostname; + const ETH_RPC_ENDPOINT = process.env.ETH_RPC_ENDPOINT; + if (!ETH_RPC_ENDPOINT) { + throw "ETH_RPC_ENDPOINT not set"; + } + return { + getLoggerConfig: () => ({ + getLevel: () => "info", + }), + getConfigGnosisBackend: () => ({ + getBackend: () => "https://rpc.gnosischain.com", + }), + getConfigEthereumBackend: () => ({ + getBackend: () => ETH_RPC_ENDPOINT, + }), + getHostnameSubstitutionConfig: () => ({ + ...((host && { + [host]: "eth", + }) || + {}), + localhost: "vitalik.eth", + }), + getConfigIpfsBackend: () => ({ + getBackend: () => ipfsBackend.toString(), + getSubdomainSupport: () => true, + }), + getConfigArweaveBackend: () => ({ + getBackend: () => arweaveBackend.toString(), + }), + getConfigSwarmBackend: () => ({ + getBackend: () => "https://api.gateway.ethswarm.org", + }), + getConfigEnsSocialsEndpoint: () => ({ + getEnsSocialsEndpoint: null, + }), + verifiedFetch: + process.env.SERVICE_WORKER_TRUSTLESS?.toLowerCase() === "true", + }; +}; + +export const createBrowserConfig = () => { + if (!process.env.IPFS_TARGET) { + throw "Invalid IPFS target"; + } + + const urlOfHost = process.env.SW_BUNDLE_PUBLIC_URL + ? new URL(process.env.SW_BUNDLE_PUBLIC_URL) + : null; + return createConfig( + urlOfHost?.hostname?.startsWith("localhost") ? 
+    new URL(process.env.IPFS_TARGET),
+    new URL("https://permagate.io"),
+  );
+};
+
+export type UrlIsNotEnsName = {
+  _tag: "UrlIsNotEnsName";
+  url: URL;
+};
+
+export type UrlIsRecord = {
+  _tag: "URLIsRecord";
+  record: IEnsResolverServiceResolveEnsRet;
+  pathName: string;
+};
+
+export const resolveUrlToProxyRecord = async (
+  location: string,
+  svcs: ServiceWorkerServices,
+): Promise<UrlIsNotEnsName | UrlIsRecord> => {
+  const { hostnameSubstitutionService, resolver } = svcs;
+
+  const sanitizedLocation = location;
+
+  const url = new URL(sanitizedLocation);
+  url.host = url.host.split(":")[0];
+  //substituteHostname always strips protocol
+  const new_location =
+    "https://" + hostnameSubstitutionService.substituteHostname(url.toString());
+  //verify new_location has a valid protocol
+
+  const new_url = new URL(new_location);
+  new_url.hostname = new_url.hostname.split(":")[0];
+  new_url.search = "";
+  //TODO: this should be hostnameIsEnsTld
+  if (!new_url.hostname.endsWith(".eth")) {
+    return {
+      _tag: "UrlIsNotEnsName",
+      url: new URL(sanitizedLocation),
+    };
+  }
+
+  const response = await resolver.resolveEns(
+    { trace_id: "service-worker" },
+    new_url.hostname,
+  );
+
+  return {
+    _tag: "URLIsRecord",
+    record: response,
+    pathName: new URL(sanitizedLocation).pathname,
+  };
+};
+
+type ProxyRecordAlias = Awaited<ReturnType<typeof recordToProxyRecord>>;
+
+export const resolveRecordToProxyRecord = async (
+  recordWrapper: UrlIsRecord,
+  config: ServiceWorkerConfig,
+  svcs: ServiceWorkerServices,
+): Promise<ProxyRecordAlias | null> => {
+  const { logger } = svcs;
+  const { record } = recordWrapper;
+  const unwrappedRecord = record.record;
+
+  if (!unwrappedRecord) {
+    logger.error("failed to receive record", {
+      origin: "service-worker-registration",
+      trace_id: "service-worker-registration",
+      context: { recordWrapper },
+    });
+    return null;
+  }
+
+  logger.info("received record", {
+    origin: "service-worker",
+    trace_id: "service-worker",
+    context: { unwrappedRecord },
+  });
+  const proxyRecord = await recordToProxyRecord(
+    { trace_id: "service-worker" },
+    config,
+    logger,
+    unwrappedRecord,
+  );
+
+  return proxyRecord;
+};
+
+export const resolveProxyRecordToURL = async (
+  proxyRecord: ProxyRecordAlias,
+  pathName: string,
+  _config: ServiceWorkerConfig,
+  svcs: ServiceWorkerServices,
+): Promise<URL | null> => {
+  const { logger } = svcs;
+  if (proxyRecord._tag === "ProxyRecordUnableToRedirect") {
+    logger.error("Redirect is not available ", {
+      origin: "service-worker",
+      trace_id: "service-worker",
+      context: { proxyRecord },
+    });
+    return null;
+  } else if (proxyRecord._tag === "ens-socials-redirect") {
+    logger.error("Redirect is unimplemented ", {
+      origin: "service-worker",
+      trace_id: "service-worker",
+      context: { proxyRecord },
+    });
+    return null;
+  } else {
+    logger.info("Redirecting to ", {
+      origin: "service-worker",
+      trace_id: "service-worker",
+      context: { proxyRecord },
+    });
+    const redirect_url = new URL(
+      ensureTrailingSlash(proxyRecord.XContentLocation) +
+        proxyRecord.XContentPath,
+    );
+    redirect_url.pathname += pathName;
+    redirect_url.pathname = trimExtraneousTrailingSlashes(
+      trimExtraneousTrailingSlashes(redirect_url.pathname),
+    );
+    return redirect_url;
+  }
+};
+
+export type FetchableByUrl = {
+  _tag: "FetchableByUrl";
+  url: URL;
+};
+
+export type FetchableByVerifiedFetch = {
+  _tag: "FetchableByVerifiedFetch";
+  hostname: string;
+};
+
+export const resolveUrl = async (
+  location: string,
+  config: ServiceWorkerConfig,
+  svcs: ServiceWorkerServices,
+): Promise<
+  FetchableByUrl | FetchableByVerifiedFetch | UrlIsNotEnsName | null
+> => {
+  const { logger } = svcs;
+  const proxyRecord = await resolveUrlToProxyRecord(location, svcs);
+  if (proxyRecord._tag === "UrlIsNotEnsName") {
+    logger.info("URL is not an ENS name", {
+      origin: "service-worker",
+      trace_id: "service-worker",
+      context: { location },
+    });
+    return proxyRecord;
+  }
+  const resolvedProxyRecord = await resolveRecordToProxyRecord(
+    proxyRecord,
+    config,
+    svcs,
+  );
+  if (!resolvedProxyRecord) {
+    logger.error("Failed to resolve proxy record", {
+      origin: "service-worker",
+      trace_id: "service-worker",
+      context: { proxyRecord },
+    });
+    return null;
+  }
+
+  if (
+    config.verifiedFetch &&
+    resolvedProxyRecord._tag === "Record" &&
+    (resolvedProxyRecord.codec === "ipfs-ns" ||
+      resolvedProxyRecord.codec === "ipns-ns")
+  ) {
+    const protocol =
+      recordNamespaceToUrlHandlerMap[
+        resolvedProxyRecord.codec as "ipfs-ns" | "ipns-ns"
+      ];
+    const contentIdentifier = resolvedProxyRecord.DoHContentIdentifier;
+    const locationUrl = new URL(location);
+    const url = protocol + "://" + contentIdentifier + locationUrl.pathname;
+
+    console.log("location", locationUrl, "url", url);
+    logger.info("Mapping verified fetch", {
+      origin: "service-worker",
+      trace_id: "service-worker",
+      context: { url: url.toString(), location: location, resolvedProxyRecord },
+    });
+    return {
+      _tag: "FetchableByVerifiedFetch",
+      hostname: url,
+    };
+  }
+
+  const resolvedUrl = await resolveProxyRecordToURL(
+    resolvedProxyRecord,
+    proxyRecord.pathName,
+    config,
+    svcs,
+  );
+
+  if (!resolvedUrl) {
+    logger.error("Failed to resolve URL", {
+      origin: "service-worker",
+      trace_id: "service-worker",
+      context: { resolvedProxyRecord },
+    });
+    return null;
+  }
+
+  return {
+    _tag: "FetchableByUrl",
+    url: resolvedUrl,
+  };
+};
+
+// default from verified-fetch is application/octet-stream, which forces a download. This is not what we want for MANY file types.
+export const defaultMimeType = "text/html";
+
+export const contentTypeParser: ContentTypeParser = async (bytes, fileName) => {
+  const detectedType = (await fileTypeFromBuffer(bytes))?.mime;
+  if (detectedType != null) {
+    return detectedType;
+  }
+  if (fileName == null) {
+    // no other way to determine file-type.
+    return defaultMimeType;
+  }
+
+  // no need to include file-types listed at https://github.com/SgtPooki/file-type#supported-file-types
+  console.log(fileName);
+  switch (fileName.split(".").pop()) {
+    case "css":
+      return "text/css";
+    case "html":
+      return "text/html";
+    case "js":
+      return "application/javascript";
+    case "json":
+      return "application/json";
+    case "txt":
+      return "text/plain";
+    case "woff2":
+      return "font/woff2";
+    // see bottom of https://github.com/SgtPooki/file-type#supported-file-types
+    case "svg":
+      return "image/svg+xml";
+    case "csv":
+      return "text/csv";
+    case "doc":
+      return "application/msword";
+    case "xls":
+      return "application/vnd.ms-excel";
+    case "ppt":
+      return "application/vnd.ms-powerpoint";
+    case "msi":
+      return "application/x-msdownload";
+    default:
+      return defaultMimeType;
+  }
+};
diff --git a/packages/dweb-api-serviceworker/src/service-worker.ts b/packages/dweb-api-serviceworker/src/service-worker.ts
new file mode 100644
index 0000000..dd81da6
--- /dev/null
+++ b/packages/dweb-api-serviceworker/src/service-worker.ts
@@ -0,0 +1,160 @@
+import {
+  contentTypeParser,
+  createBrowserConfig,
+  createServiceWorker,
+  resolveUrl,
+} from "lib.js";
+import { createHeliaHTTP } from "@helia/http";
+import { VerifiedFetch, createVerifiedFetch } from "@helia/verified-fetch";
+
+self.addEventListener("install", (event) => {
+  (event as any).waitUntil((self as any).skipWaiting());
+});
+
+self.addEventListener("activate", (event) => {
+  (event as any).waitUntil((self as any).clients.claim());
+});
+
+var verifiedFetch: null | VerifiedFetch = null;
+
+const innerEventHandler = async (event: any): Promise<Response> => {
+  const config = createBrowserConfig();
+  const services = await createServiceWorker(config);
+  const { logger } = services;
+  if (config.verifiedFetch && verifiedFetch === null) {
+    logger.info("Creating verified fetch", {
+      origin: "service-worker-fetch",
+      trace_id: "service-worker",
+    });
+
+    //TODO: this needs to have configuration options
+    verifiedFetch = await createVerifiedFetch(await createHeliaHTTP(), {
+      contentTypeParser,
+    });
+  }
+  let url = new URL(event.request.url);
+  if (!process.env.SW_BUNDLE_PUBLIC_URL) {
+    throw "SW_BUNDLE_PUBLIC_URL not set";
+  }
+  const SW_BUNDLE_PUBLIC_URL = new URL(process.env.SW_BUNDLE_PUBLIC_URL);
+
+  if (
+    (url.hostname === SW_BUNDLE_PUBLIC_URL.hostname ||
+      url.hostname.endsWith("." + SW_BUNDLE_PUBLIC_URL.hostname)) &&
+    (url.pathname === "/_limo_loader_main.js" ||
+      url.pathname === "/_limo_loader_worker.js")
+  ) {
+    logger.info("Serving bundle", {
+      origin: "service-worker-fetch",
+      trace_id: "service-worker",
+      context: {
+        url: url.toString(),
+      },
+    });
+    const request = new Request(event.request);
+    return fetch(request);
+  }
+
+  const resolvedUrl = await resolveUrl(url.toString(), config, services);
+  if (!resolvedUrl) {
+    const response = new Response(undefined, {
+      status: 404,
+      statusText: "ENS name doesn't exist",
+    });
+
+    return response;
+  } else if (resolvedUrl._tag === "UrlIsNotEnsName") {
+    return fetch(event.request.url, prepareFetchInit(event, false));
+  } else if (resolvedUrl._tag === "FetchableByUrl") {
+    logger.info("Fetching", {
+      origin: "service-worker-fetch",
+      trace_id: "service-worker",
+      context: {
+        url: resolvedUrl.url.toString(),
+      },
+    });
+    const new_location = new URL(resolvedUrl.url);
+    new_location.pathname = url.pathname;
+    new_location.search = url.search;
+    new_location.searchParams.delete("limoCacheBuster");
+    url = new_location;
+    const newRequest = prepareFetchUrl(event, url, false);
+    return fetch(newRequest.url, newRequest.requestInit);
+  } else if (resolvedUrl._tag === "FetchableByVerifiedFetch") {
+    if (verifiedFetch === null) {
+      throw new Error("verifiedFetch is not set");
+    }
+    logger.info("Verified fetch", {
+      origin: "service-worker-fetch",
+      trace_id: "service-worker",
+      context: {
+        url: resolvedUrl.hostname,
+      },
+    });
+    console.log("Verified fetch", resolvedUrl.hostname);
+    var ret = await verifiedFetch(
+      resolvedUrl.hostname,
+      prepareFetchInit(event, false),
+    );
+    var old_ret = ret;
+    do {
+      old_ret = ret;
+      if (ret.redirected) {
+        ret = await verifiedFetch(ret.url, prepareFetchInit(event, false));
+      }
+      if (
+        ret.status === 301 ||
+        ret.status === 302 ||
+        ret.status === 303 ||
+        ret.status === 307 ||
+        ret.status === 308
+      ) {
+        ret = await verifiedFetch(
+          ret.headers.get("Location") as string,
+          prepareFetchInit(event, false),
+        );
+      }
+    } while (old_ret !== ret);
+
+    console.log(ret);
+
+    return ret;
+  }
+
+  // This is a totality check to ensure that we are handling all cases
+  // it should always be of type never
+  const totalityCheck = resolvedUrl;
+  return totalityCheck;
+};
+
+self.addEventListener("fetch", (event: any) => {
+  event.respondWith(innerEventHandler(event));
+});
+function prepareFetchUrl(
+  event: any,
+  url: URL,
+  allowCredentials: boolean = true,
+) {
+  const requestInit = prepareFetchInit(event, allowCredentials);
+  // Create a new request with the updated URL
+  const newRequest = { url, requestInit };
+
+  console.log("Responding with", newRequest);
+  return newRequest;
+}
+
+function prepareFetchInit(event: any, allowCredentials: boolean) {
+  return {
+    method: event.request.method,
+    headers: event.request.headers,
+    mode: "cors" as RequestMode,
+    credentials: allowCredentials ? event.request.credentials : "omit",
+    redirect: event.request.redirect,
+    referrer: event.request.referrer,
+    body: event.request.body,
+    cache: event.request.cache,
+    integrity: event.request.integrity,
+    keepalive: event.request.keepalive,
+    duplex: event.request.duplex ??
"half", + }; +} diff --git a/packages/dweb-api-serviceworker/tsconfig.json b/packages/dweb-api-serviceworker/tsconfig.json new file mode 100644 index 0000000..636f0a8 --- /dev/null +++ b/packages/dweb-api-serviceworker/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../tsconfig.base.json", + "exclude": [ ], + "include": [ + "src/**/*.ts" + ], + "compilerOptions": { + "rootDir": "src", + "outDir": "dist", + "baseUrl": "src", + "lib": ["DOM", "ES2020"], + } +} \ No newline at end of file diff --git a/packages/dweb-api-serviceworker/webpack.config.mjs b/packages/dweb-api-serviceworker/webpack.config.mjs new file mode 100644 index 0000000..27547a5 --- /dev/null +++ b/packages/dweb-api-serviceworker/webpack.config.mjs @@ -0,0 +1,114 @@ +import { fileURLToPath } from 'url'; +import { dirname, resolve } from 'path'; +import HtmlWebpackPlugin from 'html-webpack-plugin' +import HtmlInlineScriptPlugin from 'html-inline-script-webpack-plugin' +import { createRequire } from 'node:module'; +import wp from 'webpack'; +import NodePolyfillPlugin from 'node-polyfill-webpack-plugin'; +const { DefinePlugin } = wp; + +const require = createRequire(import.meta.url); + + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); + +const createDefaultPlugins = () => { + return [ + new NodePolyfillPlugin(), + new DefinePlugin({ + 'process.env': { + 'SERVICE_WORKER_TRUSTLESS': JSON.stringify(process.env.SERVICE_WORKER_TRUSTLESS || 'false'), + 'IPFS_TARGET': JSON.stringify(process.env.IPFS_TARGET), + 'NODE_DEBUG': JSON.stringify(false), + 'ETH_RPC_ENDPOINT': JSON.stringify(process.env.ETH_RPC_ENDPOINT), + 'SW_BUNDLE_PUBLIC_URL': process.env.SW_BUNDLE_PUBLIC_URL ? JSON.stringify(process.env.SW_BUNDLE_PUBLIC_URL) : null //should be e.g. http://eth.limo || null + }, + }), + ]; +} + +const resolveRules = { + extensions: ['.ts', '.js'], + alias: { + 'lib.js': resolve(__dirname, 'src/lib.ts'), + } +}; +export default [{ + mode: 'development', + entry: { + '_limo_loader_main': ['./src/index.ts'], + }, + output: { + filename: '[name].js', + path: resolve(__dirname, 'dist'), + clean: false, + module: true, + libraryTarget: 'self', + scriptType: 'text/javascript', + }, + resolve: resolveRules, + module: { + rules: [ + { + test: /\.ts$/, + use: 'babel-loader', + exclude: /node_modules/, + resolve: { + fullySpecified: false, + }, + }, + { + test: /\.m?js$/, + resolve: { + fullySpecified: false, + }, + } + ], + }, + plugins: createDefaultPlugins().concat([ + new HtmlWebpackPlugin() + ]), + experiments: { + outputModule: true, + }, + devtool: 'source-map', +}, +{ + mode: 'development', + entry: { + '_limo_loader_sw': './src/service-worker.ts', + }, + output: { + filename: '[name].js', + path: resolve(__dirname, 'dist'), + clean: false, + module: true, + libraryTarget: 'self', + scriptType: "text/javascript" + }, + resolve: resolveRules, + module: { + rules: [ + { + test: /\.ts$/, + use: 'babel-loader', + exclude: /node_modules/, + }, + { + test: /\.m?js$/, + resolve: { + fullySpecified: false, + }, + } + ], + }, + plugins: createDefaultPlugins(), + experiments: { + outputModule: true, + }, + node: { + global: true, + }, + devtool: 'source-map', +}]; diff --git a/packages/dweb-api-types/package.json b/packages/dweb-api-types/package.json new file mode 100644 index 0000000..7180cf1 --- /dev/null +++ b/packages/dweb-api-types/package.json @@ -0,0 +1,12 @@ +{ + "name": "dweb-api-types", + "version": "1.0.0", + "description": "Shared type definitions for dweb-api", + "scripts": { + "fmt": 
"prettier src -w", + "build": "tsc" + }, + "author": "eth.limo team", + "license": "MIT", + "type": "module" +} diff --git a/packages/dweb-api-types/src/arweave.ts b/packages/dweb-api-types/src/arweave.ts new file mode 100644 index 0000000..2454a31 --- /dev/null +++ b/packages/dweb-api-types/src/arweave.ts @@ -0,0 +1,9 @@ +import { IRequestContext } from "./request-context.js"; + +export interface IArweaveResolver { + resolveArweave: ( + request: IRequestContext, + tx_id: string, + ens_name: string, + ) => Promise; +} diff --git a/packages/dweb-api-types/src/cache.ts b/packages/dweb-api-types/src/cache.ts new file mode 100644 index 0000000..a53fe51 --- /dev/null +++ b/packages/dweb-api-types/src/cache.ts @@ -0,0 +1,27 @@ +import { IRequestContext } from "./request-context.js"; +import { ZodType } from "zod"; + +export interface ICacheService { + memoize: ( + request: IRequestContext, + fThunk: () => Promise>, + schema: ZodType, + dbPrefix: string, + key: string, + ) => Promise>; + getTtl: ( + request: IRequestContext, + dbPrefix: string, + key: string, + ) => Promise; +} + +export interface INamedMemoryCache { + getServiceName(): string; + put: (request: IRequestContext, key: string, v: T, ttl?: number) => void; + get: (request: IRequestContext, key: string) => Promise; + getTtl: ( + request: IRequestContext, + key: string, + ) => Promise; +} diff --git a/packages/dweb-api-types/src/config.ts b/packages/dweb-api-types/src/config.ts new file mode 100644 index 0000000..807a13c --- /dev/null +++ b/packages/dweb-api-types/src/config.ts @@ -0,0 +1,114 @@ +export interface IConfigurationEnsSocials { + getConfigEnsSocialsEndpoint: () => { + getEnsSocialsEndpoint: null | ((ensName: string) => string); + }; +} + +export interface IConfigurationIpfs { + getConfigIpfsBackend: () => { + getBackend: () => string; + getSubdomainSupport: () => boolean; + }; +} + +export interface IConfigurationArweave { + getConfigArweaveBackend: () => { + getBackend: () => string; + }; +} + +export interface IConfigurationSwarm { + getConfigSwarmBackend: () => { + getBackend: () => string; + }; +} + +export interface IConfigurationEthereum { + getConfigEthereumBackend: () => { + getBackend: () => string; + }; +} + +export interface IConfigurationEthereumFailover { + getConfigEthereumFailover: () => { + getStallTimeout: () => number; + getProviderStallTimeout: () => number; + getQuorum: () => number; + getPrimaryFailoverBackend: () => string | null; + getSecondaryFailoverBackend: () => string | null; + }; +} + +export interface IConfigurationGnosis { + getConfigGnosisBackend: () => { + getBackend: () => string; + }; +} + +export type IConfigurationLogger = { + getLoggerConfig: () => { + getLevel: () => "warn" | "error" | "info" | "debug"; + }; +}; + +export type HostnameSubstitutionConfiguration = { + [key: string]: string; +}; + +export interface IConfigHostnameSubstitution { + getHostnameSubstitutionConfig: () => HostnameSubstitutionConfiguration; +} + +export interface IDomainQueryConfig { + getDomainQueryConfig: () => { + getDomainsApiEndpoint: () => string; + getMaxHops: () => number; + } | null; +} + +export interface IRedisConfig { + getRedisConfig: () => { + getUrl: () => string; + }; +} + +export interface ICacheConfig { + getCacheConfig: () => { + getTtl: () => number; + }; +} + +export interface IAskEndpointConfig { + getConfigAskEndpoint: () => { + getMaxLabelLimit: () => number; + getRateEnabled: () => boolean; + getRateLimit: () => number; + getRatePeriod: () => number; + }; +} + +export interface 
IConfigurationServerRouter { + getRouterConfig: () => { + getRouterListenPort: () => string; + }; +} +export interface IConfigurationServerDnsquery { + getDnsqueryRouterConfig: () => { + getDnsqueryRouterListenPort: () => string; + getDnsqueryRouterEnabled: () => boolean; + }; +} +export interface IConfigurationServerAsk { + getAskRouterConfig: () => { + getAskRouterListenPort: () => string; + getAskRouterEnabled: () => boolean; + }; +} + +export interface IConfigurationKubo { + getKuboConfiguration: () => { + getKuboApiUrl: () => URL | null; + getKuboTimeoutMs: () => number | null; + getKuboAuth: () => string | null; + }; +} diff --git a/packages/dweb-api-types/src/dweb-api-resolver.ts b/packages/dweb-api-types/src/dweb-api-resolver.ts new file mode 100644 index 0000000..4b37627 --- /dev/null +++ b/packages/dweb-api-types/src/dweb-api-resolver.ts @@ -0,0 +1,4 @@ +export interface ProxyRecord { + XContentLocation: string; + XContentPath: string; +} diff --git a/packages/dweb-api-types/src/ens-resolver.ts b/packages/dweb-api-types/src/ens-resolver.ts new file mode 100644 index 0000000..0c796d4 --- /dev/null +++ b/packages/dweb-api-types/src/ens-resolver.ts @@ -0,0 +1,42 @@ +import { z } from "zod"; +import { IRequestContext } from "./request-context.js"; + +export const RECORD_CODEC_TYPE = z.enum([ + "ipfs-ns", + "ipns-ns", + "arweave-ns", + "swarm", +]); + +export const Record = z + .union([ + z.object({ + _tag: z.literal("Record"), + codec: RECORD_CODEC_TYPE, + DoHContentIdentifier: z.string(), + ensName: z.string(), + }), + z.object({ + _tag: z.literal("ens-socials-redirect"), + ensName: z.string(), + }), + ]) + .nullable(); + +export type IRecord = z.infer; + +export interface IEnsResolverService { + resolveEns( + request: IRequestContext, + hostname: string, + ): Promise; +} + +export const ZodIEnsResolverServiceResolveEnsRet = z.object({ + record: Record, + resolverExists: z.boolean(), +}); + +export type IEnsResolverServiceResolveEnsRet = z.infer< + typeof ZodIEnsResolverServiceResolveEnsRet +>; diff --git a/packages/dweb-api-types/src/kubo-api.ts b/packages/dweb-api-types/src/kubo-api.ts new file mode 100644 index 0000000..4caa182 --- /dev/null +++ b/packages/dweb-api-types/src/kubo-api.ts @@ -0,0 +1,8 @@ +import { IRequestContext } from "./request-context.js"; + +export interface IKuboApiService { + resolveIpnsName( + request: IRequestContext, + name: string, + ): Promise; +} diff --git a/packages/dweb-api-types/src/logger.ts b/packages/dweb-api-types/src/logger.ts new file mode 100644 index 0000000..9f36606 --- /dev/null +++ b/packages/dweb-api-types/src/logger.ts @@ -0,0 +1,12 @@ +export type ILoggerService = { + error: (message: string, context: ILoggerServiceContext) => void; + warn: (message: string, context: ILoggerServiceContext) => void; + info: (message: string, context: ILoggerServiceContext) => void; + debug: (message: string, context: ILoggerServiceContext) => void; +}; + +export type ILoggerServiceContext = { + origin: string; + trace_id: string; + context?: Object; +}; diff --git a/packages/dweb-api-types/src/name-service.ts b/packages/dweb-api-types/src/name-service.ts new file mode 100644 index 0000000..bf59931 --- /dev/null +++ b/packages/dweb-api-types/src/name-service.ts @@ -0,0 +1,15 @@ +import { IRequestContext } from "./request-context.js"; + +export interface INameService { + getContentHash( + request: IRequestContext, + name: string, + ): Promise; +} + +export interface INameServiceFactory { + getNameServiceForDomain( + request: IRequestContext, + domain: 
string, + ): INameService; +} diff --git a/packages/dweb-api-types/src/redis.ts b/packages/dweb-api-types/src/redis.ts new file mode 100644 index 0000000..cf4fa49 --- /dev/null +++ b/packages/dweb-api-types/src/redis.ts @@ -0,0 +1,7 @@ +export interface IRedisClient { + get(key: string): Promise; + set(key: string, value: string, duration: number): Promise; + ttl(key: string): Promise; + expire(key: string, duration: number): Promise; + incr(key: string): Promise; +} diff --git a/src/services/lib/index.ts b/packages/dweb-api-types/src/request-context.ts similarity index 60% rename from src/services/lib/index.ts rename to packages/dweb-api-types/src/request-context.ts index 5c59dee..ae1a8ae 100644 --- a/src/services/lib/index.ts +++ b/packages/dweb-api-types/src/request-context.ts @@ -1,3 +1,3 @@ export interface IRequestContext { - trace_id: string, -} \ No newline at end of file + trace_id: string; +} diff --git a/packages/dweb-api-types/tsconfig.json b/packages/dweb-api-types/tsconfig.json new file mode 100644 index 0000000..d3de9eb --- /dev/null +++ b/packages/dweb-api-types/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "../../tsconfig.base.json", + "exclude": [ ], + "include": [ + "./src/**/*.ts" + ], + "compilerOptions": { + "rootDir": "./src", + "outDir": "./dist", + "baseUrl": "./src", + "moduleResolution": "bundler", + "module": "ES2022" + }, +} \ No newline at end of file diff --git a/src/configuration/index.ts b/src/configuration/index.ts deleted file mode 100644 index 247b154..0000000 --- a/src/configuration/index.ts +++ /dev/null @@ -1,165 +0,0 @@ -import {injectable } from "inversify"; - -export const VALID_ENS_TLDS = [ - "eth", - "gno", - "art", -] - - -//FIXME: ??? -const createHostname = (...args: string[]) => { - var ret = []; - for (const v of args) { - ret.push(v.replace(".", "")); - } - return ret; -}; - -const configuration = { - // Ethereum JSON RPC endpoint - ethereum: { - rpc: process.env.ETH_RPC_ENDPOINT || "http://192.168.1.7:8845", - failover_primary: process.env.ETH_RPC_ENDPOINT_FAILOVER_PRIMARY || null, - failover_secondary: process.env.ETH_RPC_ENDPOINT_FAILOVER_SECONDARY || null, - provider_stall_timeout_ms: parseInt(process.env.ETH_PROVIDER_STALL_TIMEOUT_MS || "200"), //see fallbackProviderConfig.stallTimeout - provider_timeout_ms: parseInt(process.env.ETH_PROVIDER_TIMEOUT_MS || "7000"), //see provider._getConnection().timeout - quorum: parseInt(process.env.ETH_PROVIDER_QUORUM || "1"), - }, - gnosis: { - rpc: process.env.GNO_RPC_ENDPOINT || "https://rpc.gnosischain.com", - }, - // Storage backends - ipfs: { - backend: process.env.IPFS_TARGET || "http://127.0.0.1:8080", - auth: process.env.IPFS_AUTH_KEY || null, - //if true, proxies {cid}.{ipfs/ipns}.IPFS_TARGET - subdomainSupport: - process.env.IPFS_SUBDOMAIN_SUPPORT === "true" ? true : false, - //ms before we give up and just return an ipns record - kubo_timeout_ms: parseInt(process.env.IPFS_KUBO_TIMEOUT_MS || "2500"), - //this has no default because we assume this isn't available - kubo_api_url: process.env.IPFS_KUBO_API_URL && new URL(process.env.IPFS_KUBO_API_URL) || undefined, - }, - arweave: { - backend: process.env.ARWEAVE_TARGET || "https://arweave.net", - }, - swarm: { - backend: process.env.SWARM_TARGET || "https://api.gateway.ethswarm.org", - }, - redis: { - url: process.env.REDIS_URL || "redis://127.0.0.1:6379", - }, - cache: { - ttl: parseInt(process.env.CACHE_TTL || "300"), - purge: process.env.PURGE_CACHE_ON_START === "true" ? 
true : false, - purge_count: parseInt(process.env.PURGE_CACHE_COUNT || "20000"), - purge_pattern: process.env.PURGE_CACHE_PATTERN || `*.eth.limo`, - }, - // Proxy - router: { - listen: process.env.LISTEN_PORT || 8888, - origin: "LIMO Proxy", - hostnameSubstitutionConfig: process.env.LIMO_HOSTNAME_SUBSTITUTION_CONFIG || JSON.stringify({ - "eth.limo": "eth", - "eth.local": "eth", - "gno.limo": "gno", - "gno.local": "gno", - }) - }, - // Server ask endpoint - ask: { - listen: process.env.ASK_LISTEN_PORT || 9090, - enabled: process.env.ASK_ENABLED || "false", - rate: { - limit: Number(process.env.ASK_RATE_LIMIT ?? 10), - //configuration.ask.rate.period: input in minutes, actual value in seconds - period: Number(process.env.ASK_RATE_PERIOD ?? 15) * 60, - enabled: false, //set via limit = 0 - }, - }, - //dns-query isolated endpoint (DOH) - dnsquery: { - listen: process.env.DNSQUERY_LISTEN_PORT || 11000, - enabled: process.env.DNSQUERY_ENABLED === "false" ? false : true, - }, - tests: { - hostname: "vitalik.eth", - }, - ens: { - socialsEndpoint: (ens: string) => { - return `https://landing.nimi.page${ens ? "/?ens=" + encodeURI(ens) : ""}`; - }, - socialsEndpointEnabled: process.env.ENS_SOCIALS_ENDPOINT_ENABLED === "true" ? true : false, - }, - domainsapi: { - ttl: 60, - endpoint: process.env.DOMAINSAPI_ENDPOINT, - max_hops: 5, //e.g. asdf.limo -> whatever -> whatever.eth - }, - logging: { - level: process.env.LOG_LEVEL || "info", - } -}; -configuration.ask.rate.enabled = configuration.ask.rate.limit > 0; - -export type IConfiguration = typeof configuration; -export interface IConfigurationService { - get(): IConfiguration; -} - -@injectable() -export class DefaultConfigurationService implements IConfigurationService { - get(): IConfiguration { - return configuration; - } -} - -@injectable() -export class TestConfigurationService implements IConfigurationService { - - configuration: IConfiguration; - - constructor() { - this.configuration = JSON.parse(JSON.stringify(configuration)); - this.configuration.ethereum.rpc = "http://localhost:69420"; //ethers is shimmed - this.configuration.ethereum.failover_primary = null; - this.configuration.ethereum.failover_secondary = null; - this.configuration.ethereum.quorum = 1; - this.configuration.ethereum.provider_stall_timeout_ms = 200; - this.configuration.ipfs.backend = "https://ipfs"; //ipfs is never actually queried - this.configuration.ipfs.auth = null; - this.configuration.ipfs.subdomainSupport = true; - this.configuration.redis.url = "redis://redis"; //redis is shimmed - this.configuration.ask.enabled = "false"; - this.configuration.dnsquery.enabled = false; - this.configuration.cache.ttl = 69; - this.configuration.logging.level = "debug"; - this.configuration.swarm.backend = "https://swarm"; //swarm is never actually queried - this.configuration.arweave.backend = "https://arweave"; //arweave is never actually queried - this.configuration.ens.socialsEndpoint = (ens: string) => { - return `https://socials.com?name=${ens}` - }; - this.configuration.domainsapi.endpoint = "https://domainsapi"; //this needs to be set otherwise it will short circuit to not blacklisted - this.configuration.ask.rate.enabled = false; - //the rate limiter being set to 2 ensures that any shared state between test cases causes a test failure explosion - //this is a good thing, as it means that debugging a bug in the test harness is easier - //the rate limiter is a good smell test for accidental shared state in the test harness because it's a state machine and there are definitely 
at least 2 cases that can hit it - //a bug in the test harness was discovered because there were tests that were erroring out due to rate limiting being triggered erroneously - //the problem was that beforeEach and afterEach weren't being called because mocha's 'it' wasn't getting a correct binding to the Mocha.Suite - //mocha relies on stateful access of Mocha.Suite (the thisvar in a 'describe' function) to do something similar to what our test harness does on top of Mocha - //for more information, look up why mocha doesn't support arrow functions and requires using regular `function (params) {}` blocks - this.configuration.ask.rate.limit = 2; - this.configuration.ask.rate.period = 30; - //we choose not to test with this because the default behavior for the kubo service is to die quickly and revert to the regular behavior where kubo is absent - this.configuration.ipfs.kubo_api_url = undefined; - } - - get(): IConfiguration { - return this.configuration; - } - - set(callback: (configuration: IConfiguration) => void) { - callback(this.configuration) - } -} \ No newline at end of file diff --git a/src/dependencies/BindingsManager.ts b/src/dependencies/BindingsManager.ts deleted file mode 100644 index 63244ca..0000000 --- a/src/dependencies/BindingsManager.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { Container, interfaces } from "inversify"; -import { DITYPES } from "./types"; - -export enum EnvironmentConfiguration { - Production = "Production", - Development = "Development", - LiveDataIntegration = "LiveDataIntegration" -} -type EnvironmentBindingConfig = { - type: "class"; - theConstructor: interfaces.Newable; - notASingleton?: true; //default behavior is to be a singleton, field should only be present if binding isn't singleton -} | { - type: "factory"; - factory: interfaces.FactoryCreator; -}; -interface BindingConfig { - key: keyof typeof DITYPES; - config: { - [K in EnvironmentConfiguration]: EnvironmentBindingConfig; - }; -} -export class BindingsManager { - private container: Container; - private configs: ((env: EnvironmentConfiguration) => void)[] = []; - private seenKeys: Set = new Set(); - - constructor() { - this.container = new Container(); - } - - registerBinding(bindingConfig: BindingConfig) { - if (this.seenKeys.has(bindingConfig.key)) { - throw new Error(`Duplicate binding for ${bindingConfig.key}`); - } - this.configs.push((env: EnvironmentConfiguration) => { - const config = bindingConfig.config[env]; - if (config.type === "class") { - const binding = this.container.bind(DITYPES[bindingConfig.key]).to(config.theConstructor); - if (!config.notASingleton) { - binding.inSingletonScope(); - } - } else if (config.type === "factory") { - this.container.bind(DITYPES[bindingConfig.key]).toFactory(config.factory); - } - }); - this.seenKeys.add(bindingConfig.key); - } - - bindAll(environment: EnvironmentConfiguration) { - Object.keys(DITYPES).forEach((key) => { - if (!this.seenKeys.has(key as keyof typeof DITYPES)) { - throw new Error(`No binding for ${key}`); - } - }); - this.configs.forEach((config) => { - config(environment); - }); - return { - container: this.container, - environment - }; - } -} diff --git a/src/dependencies/inversify.config.ts b/src/dependencies/inversify.config.ts deleted file mode 100644 index 09bca19..0000000 --- a/src/dependencies/inversify.config.ts +++ /dev/null @@ -1,316 +0,0 @@ -import { interfaces } from "inversify"; -import { BindingsManager, EnvironmentConfiguration } from "./BindingsManager"; -import { IConfigurationService, 
DefaultConfigurationService, TestConfigurationService } from "../configuration"; -import { IDnsQuery, DnsQuery } from "../dnsquery"; -import { ICacheService, LocallyCachedRedisCacheService, IRedisClient, RedisClient, TestRedisClient, MemoryCacheFactory, INamedMemoryCache } from "../services/CacheService"; -import { IDomainQueryService, DomainQueryService, IDomainQuerySuperagentService, DomainQuerySuperagentService, TestDomainQuerySuperagentService } from "../services/DomainsQueryService"; -import { IEnsResolverService, EnsResolverService } from "../services/EnsResolverService"; -import { IArweaveResolver, ArweaveResolver } from "../services/EnsResolverService/arweave"; -import { INameService, INameServiceFactory, NameServiceFactory } from "../services/NameService"; -import { ILoggerService, LoggerService, TestLoggerService } from "../services/LoggerService"; -import { TestResolverService } from "../test/TestResolverService"; -import { DomainRateLimitService, IDomainRateLimitService } from "../services/DomainRateLimit"; -import { IKuboApiService, KuboApiService } from "../services/KuboApiService"; -import { EnsService } from "../services/NameService/EnsService"; -import { HostnameSubstitutionService, IHostnameSubstitutionService } from "../services/HostnameSubstitutionService"; -import { Web3NameSdkService } from "../services/NameService/Web3NameSdkService"; - -export const createApplicationConfigurationBindingsManager = () => { - const bindingsManager = new BindingsManager(); - bindingsManager.registerBinding({ - key: "EnsService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: EnsService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: TestResolverService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: EnsService, - }, - } - }); - bindingsManager.registerBinding({ - key: "Web3NameSdkService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: Web3NameSdkService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: TestResolverService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: Web3NameSdkService, - }, - } - }); - - bindingsManager.registerBinding({ - key: "EnsResolverService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: EnsResolverService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: EnsResolverService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: EnsResolverService, - }, - } - }); - - bindingsManager.registerBinding({ - key: "LoggerService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: LoggerService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: TestLoggerService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: LoggerService, - }, - } - }); - - bindingsManager.registerBinding({ - key: "DomainQueryService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: DomainQueryService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: DomainQueryService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: DomainQueryService, - }, - } - }); - - bindingsManager.registerBinding({ - key: 
"DomainQuerySuperagentService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: DomainQuerySuperagentService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: TestDomainQuerySuperagentService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: TestDomainQuerySuperagentService, - }, - } - }); - - bindingsManager.registerBinding({ - key: "CacheService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: LocallyCachedRedisCacheService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: LocallyCachedRedisCacheService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: LocallyCachedRedisCacheService, - }, - } - }); - - bindingsManager.registerBinding({ - key: "RedisClient", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: RedisClient, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: TestRedisClient, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: TestRedisClient, - }, - } - }); - - bindingsManager.registerBinding({ - key: "ConfigurationService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: DefaultConfigurationService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: TestConfigurationService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: TestConfigurationService, - }, - } - }); - - bindingsManager.registerBinding({ - key: "ArweaveResolver", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: ArweaveResolver, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: TestResolverService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: ArweaveResolver, - }, - } - }); - - bindingsManager.registerBinding({ - key: "DnsQuery", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: DnsQuery, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: DnsQuery, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: DnsQuery, - }, - } - }); - - const memoryCaches = new MemoryCacheFactory(); - - const cacheFactoryFunction = (context: interfaces.Context): (str: string) => INamedMemoryCache => { - return (str: string) => { - return memoryCaches.createNamedMemoryCacheFactory(context, str); - }; - }; - - bindingsManager.registerBinding({ - key: "NamedMemoryCacheFactory", - config: { - [EnvironmentConfiguration.Production]: { - type: "factory", - factory: cacheFactoryFunction, - }, - [EnvironmentConfiguration.Development]: { - type: "factory", - factory: cacheFactoryFunction, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "factory", - factory: cacheFactoryFunction, - }, - } - }); - bindingsManager.registerBinding({ - key: "DomainRateLimitService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: DomainRateLimitService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: DomainRateLimitService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: DomainRateLimitService, - }, - } - }); - 
bindingsManager.registerBinding({ - key: "KuboApiService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: KuboApiService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: KuboApiService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: KuboApiService, - }, - } - }); - bindingsManager.registerBinding({ - key: "HostnameSubstitutionService", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: HostnameSubstitutionService, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: HostnameSubstitutionService, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: HostnameSubstitutionService, - }, - } - }); - bindingsManager.registerBinding({ - key: "NameServiceFactory", - config: { - [EnvironmentConfiguration.Production]: { - type: "class", - theConstructor: NameServiceFactory, - }, - [EnvironmentConfiguration.Development]: { - type: "class", - theConstructor: NameServiceFactory, - }, - [EnvironmentConfiguration.LiveDataIntegration]: { - type: "class", - theConstructor: NameServiceFactory, - }, - } - - }); - return bindingsManager; -}; - -export const createProductionAppContainer = () => { - return createApplicationConfigurationBindingsManager().bindAll(EnvironmentConfiguration.Production); -}; \ No newline at end of file diff --git a/src/dependencies/types/index.ts b/src/dependencies/types/index.ts deleted file mode 100644 index ed0ab1f..0000000 --- a/src/dependencies/types/index.ts +++ /dev/null @@ -1,20 +0,0 @@ -const DITYPES = { - EnsService: Symbol.for("EnsService"), - Web3NameSdkService: Symbol.for("Web3NameSdkService"), - EnsResolverService: Symbol.for("EnsResolverService"), - LoggerService: Symbol.for("LoggerService"), - DomainQuerySuperagentService: Symbol.for("DomainQuerySuperagentService"), - DomainQueryService: Symbol.for("DomainQueryService"), - CacheService: Symbol.for("CacheService"), - NamedMemoryCacheFactory: Symbol("NamedMemoryCacheFactory"), - RedisClient: Symbol("RedisClient"), - ConfigurationService: Symbol("ConfigurationService"), - ArweaveResolver: Symbol("ArweaveResolver"), - DnsQuery: Symbol("DnsQuery"), - DomainRateLimitService: Symbol("DomainRateLimitService"), - KuboApiService: Symbol("KuboApiService"), - HostnameSubstitutionService: Symbol("HostnameSubstitutionService"), - NameServiceFactory: Symbol("NameServiceFactory"), -}; - -export { DITYPES }; diff --git a/src/index.ts b/src/index.ts deleted file mode 100644 index 3b2f487..0000000 --- a/src/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import "reflect-metadata"; -import { Server } from "./server/index.js"; -import { createProductionAppContainer } from "./dependencies/inversify.config.js"; - -// Start main worker process - -const AppContainer = createProductionAppContainer().container; - -AppContainer.bind(Server).to(Server).inSingletonScope(); -const server = AppContainer.get(Server); -server.start(); \ No newline at end of file diff --git a/src/scripts/dump_test_cases.ts b/src/scripts/dump_test_cases.ts deleted file mode 100644 index bbd44a8..0000000 --- a/src/scripts/dump_test_cases.ts +++ /dev/null @@ -1,108 +0,0 @@ -import "reflect-metadata" -import { DefaultConfigurationService, TestConfigurationService } from "../configuration"; -import { LoggerService } from "../services/LoggerService"; -import { ArweaveResolver, arweaveTxIdToArweaveSandboxSubdomainId } from 
"../services/EnsResolverService/arweave"; -import { EnsService } from "../services/NameService/EnsService"; - -const configurationService = new TestConfigurationService(); -//this is a hack to ensure certain values (i.e. no logging) on the configuration service -//logging is disabled for easy piping -configurationService.set((conf) => { - const defaultConfigService = new DefaultConfigurationService(); - conf.ens = defaultConfigService.get().ens; - conf.arweave = defaultConfigService.get().arweave; - conf.ipfs = defaultConfigService.get().ipfs; - conf.swarm = defaultConfigService.get().swarm; - conf.logging = defaultConfigService.get().logging; - conf.logging.level = "none"; - conf.cache = defaultConfigService.get().cache; - conf.cache.ttl = 5; - conf.router = defaultConfigService.get().router; - conf.ethereum = defaultConfigService.get().ethereum; -}); - -const loggerService = new LoggerService(configurationService); -const ensService = new EnsService(configurationService, loggerService); -const arweaveService = new ArweaveResolver(loggerService); - -const testCases = [ - { - name: "blockranger.eth", - type: "ipfs", - }, - { - name: "fast-ipfs.eth", - type: "ipfs", - }, - { - name: "surveychain.eth", - type: "ipfs", - }, - { - name: "easy-rln.eth", - type: "ipfs", - }, - { - name: "view-code.eth", - type: "ipfs", - }, - { - name: "makesy.eth", - type: "arweave", - }, - { - name: "swarm.eth", - type: "swarm", - }, - { - name: "nick.eth", - type: "ipns", - }, - { - name: "not-a-real-ens-name-ahsalabadkadvhda.eth", //don't register this lmao - type: "none", - } -]; - -const main = async () => { - const results = []; - for (const testCase of testCases) { - const request = { - trace_id: "TEST_TRACE_ID", - }; - const contentHash = await ensService.getContentHash( - request, - testCase.name); - const additionalInfo:Partial<{ - arweave: { - result: string, - query: string, - subdomain_sandbox_id: string - }; - }> = {}; - if(contentHash.error) { - throw contentHash.error; - } - if(testCase.type === "arweave") { - if(!contentHash.result) { - throw "arweave result is null" - } - const ar_id = contentHash.result.split("arweave://")[1]; - const arweaveResult = await arweaveService.resolveArweave(request, ar_id, testCase.name); - const subdomain_sandbox_id = await arweaveTxIdToArweaveSandboxSubdomainId(request, loggerService, ar_id) - if(!subdomain_sandbox_id) { - throw "subdomain_sandbox_id is null" - } - additionalInfo.arweave = { - result: arweaveResult, - query: ar_id, - subdomain_sandbox_id - } - } - - results.push({ name: testCase.name, type: testCase.type, contentHash: contentHash.result, additionalInfo }); - } - console.log(JSON.stringify(results, null, 2)); -}; - -main() \ No newline at end of file diff --git a/src/services/DomainRateLimit/DomainRateLimit.spec.ts b/src/services/DomainRateLimit/DomainRateLimit.spec.ts deleted file mode 100644 index 1aee606..0000000 --- a/src/services/DomainRateLimit/DomainRateLimit.spec.ts +++ /dev/null @@ -1,79 +0,0 @@ -import "reflect-metadata" -import { expect } from 'chai'; -import { describe, it } from 'mocha' -import { SinonStubbedInstance, createStubInstance, stub } from 'sinon'; -import { DomainRateLimitService } from '.'; -import { ILoggerService, LoggerService } from '../LoggerService'; -import { IRedisClient, RedisClient } from '../CacheService'; -import { IRequestContext } from "../lib"; - -describe('DomainRateLimitService', () => { - let redisClient: SinonStubbedInstance; - let logger: ILoggerService; - let service: DomainRateLimitService; - - 
beforeEach(() => { - redisClient = createStubInstance(RedisClient); - logger = createStubInstance(LoggerService); - service = new DomainRateLimitService(redisClient as any, logger as any); - }); - - it('should increment rate limit and set TTL if not already set', async () => { - const domain = 'test.com'; - const maxQueries = 10; - const intervalInSeconds = 60; - - redisClient.incr.resolves(1); - redisClient.ttl.resolves(-1); - - const request: IRequestContext = { - trace_id: "TEST_TRACE_ID" - } - - const result = await service.incrementRateLimit(request, domain, maxQueries, intervalInSeconds); - - expect(result.countOverMax).to.be.false; - expect(result.count).to.equal(1); - expect(result.ttl).to.equal(intervalInSeconds); - expect(redisClient.expire.calledOnceWith(`rate_limit/${domain}`, intervalInSeconds)).to.be.true; - }); - - it('should increment rate limit and not modify TTL if already set', async () => { - const domain = 'test.com'; - const maxQueries = 10; - const intervalInSeconds = 60; - - const request: IRequestContext = { - trace_id: "TEST_TRACE_ID" - } - - redisClient.incr.resolves(2); - redisClient.ttl.resolves(30); - - const result = await service.incrementRateLimit(request, domain, maxQueries, intervalInSeconds); - expect(result.countOverMax).to.be.false; - expect(result.count).to.equal(2); - expect(result.ttl).to.equal(30); - expect(redisClient.expire.notCalled).to.be.true; - }); - - it('should indicate when count is over max queries', async () => { - const domain = 'test.com'; - const maxQueries = 10; - const intervalInSeconds = 60; - - const request: IRequestContext = { - trace_id: "TEST_TRACE_ID" - } - - redisClient.incr.resolves(11); - redisClient.ttl.resolves(30); - - const result = await service.incrementRateLimit(request, domain, maxQueries, intervalInSeconds); - - expect(result.countOverMax).to.be.true; - expect(result.count).to.equal(11); - expect(result.ttl).to.equal(30); - expect(redisClient.expire.notCalled).to.be.true; - }); -}); \ No newline at end of file diff --git a/src/services/DomainRateLimit/index.ts b/src/services/DomainRateLimit/index.ts deleted file mode 100644 index 51ae03d..0000000 --- a/src/services/DomainRateLimit/index.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { inject, injectable } from "inversify"; -import { DITYPES } from "../../dependencies/types"; -import { IRedisClient } from "../CacheService"; -import { ILoggerService } from "../LoggerService"; -import { IRequestContext } from "../lib"; - -export interface IDomainRateLimitServiceRet { - countOverMax: boolean, - count: number, - ttl: number -} - -export interface IDomainRateLimitService { - incrementRateLimit(request: IRequestContext, domain: string, maxQueries: number, intervalInSeconds: number): Promise; -} - -@injectable() -export class DomainRateLimitService implements IDomainRateLimitService { - _redisClient: IRedisClient; - _logger: ILoggerService; - - constructor(@inject(DITYPES.RedisClient) redisClient: IRedisClient, @inject(DITYPES.LoggerService) logger: ILoggerService) { - this._redisClient = redisClient; - this._logger = logger; - } - - async incrementRateLimit(request: IRequestContext, domain: string, maxQueries: number, intervalInSeconds: number): Promise { - const key = `rate_limit/${domain}`; - const count = await this._redisClient.incr(key); - var ttl = await this._redisClient.ttl(key); - this._logger.debug('Rate limit incremented', { - ...request, - origin: 'DomainRateLimitService', - context: { - key: key, - count: count, - ttl: ttl - } - }); - if(ttl < 1) { - await 
this._redisClient.expire(key, intervalInSeconds); - ttl = intervalInSeconds - this._logger.debug(`Rate limit expired, setting new TTL`, { - ...request, - origin: 'DomainRateLimitService', - context: { - key: key, - count: count, - ttl: ttl - } - }); - } - return { - countOverMax: count > maxQueries, - count: count, - ttl: ttl - } - } -} diff --git a/src/services/EnsResolverService/utils.ts b/src/services/EnsResolverService/utils.ts deleted file mode 100644 index 9faa1ca..0000000 --- a/src/services/EnsResolverService/utils.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { ProxyRecord, Record } from "."; -import { recordNamespaceToUrlHandlerMap } from "./const"; -import { ensureTrailingSlash } from "../../utils"; -import { IConfigurationService } from "../../configuration"; -import { ILoggerService } from "../LoggerService"; -import { arweaveUrlToSandboxSubdomain } from "./arweave"; -import { IRequestContext } from "../lib"; -export const recordToProxyRecord = async ( - request: IRequestContext, - configurationSvc: IConfigurationService, - logger: ILoggerService, - record: NonNullable -): Promise => { - const configuration = configurationSvc.get(); - var path = "/"; - if (record._tag === "ens-socials-redirect") { - const redirectUrl = new URL( - configuration.ens.socialsEndpoint(record.ensName), - ); - return { - ...record, - XContentLocation: redirectUrl.origin, - XContentPath: ensureTrailingSlash(redirectUrl.pathname + redirectUrl.search), - }; - } else if (record._tag === "Record") { - if (record.codec === "ipfs-ns" || record.codec === "ipns-ns") { - const url = new URL(configuration.ipfs.backend); - const urlSafeIpfsOrIpns = recordNamespaceToUrlHandlerMap[record.codec]; - var path = "/"; - if (configuration.ipfs.subdomainSupport) { - - let DoHContentIdentifier = record.DoHContentIdentifier; - if(record.codec === "ipns-ns") { - DoHContentIdentifier = normalizeUrlFragmentForIpfsSubdomainGateway(DoHContentIdentifier); - } - url.host = `${DoHContentIdentifier}.${urlSafeIpfsOrIpns}.${url.host}`; - } else { - path = `/${urlSafeIpfsOrIpns}/${record.DoHContentIdentifier}/`; - } - return { - ...record, - XContentLocation: url.toString(), - XContentPath: path, - }; - } else if (record.codec === "arweave-ns") { - const backend = new URL(configuration.arweave.backend); - return { - ...record, - XContentLocation: ( - await arweaveUrlToSandboxSubdomain( - request, - logger, - record.DoHContentIdentifier, - backend, - ) - ).toString(), - XContentPath: ensureTrailingSlash("/" + record.DoHContentIdentifier), - }; - } else if (record.codec === "swarm") { - return { - ...record, - XContentLocation: configuration.swarm.backend, - XContentPath: ensureTrailingSlash( - "/bzz/" + record.DoHContentIdentifier, - ), - }; - } - //record.codec should be never due to exhaustivity check - return record.codec; - } else { - //record should be never due to exhaustivity check - return record; - } -}; -export function normalizeUrlFragmentForIpfsSubdomainGateway(DoHContentIdentifier: string): string { - return [...DoHContentIdentifier].map((c) => { - if(c == '.') { - return '-'; - } else if(c == '-') { - return '--'; - } else { - return c; - } - }).join(""); -} - diff --git a/src/services/HostnameSubstitutionService/index.ts b/src/services/HostnameSubstitutionService/index.ts deleted file mode 100644 index 4ce500c..0000000 --- a/src/services/HostnameSubstitutionService/index.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { inject, injectable } from "inversify"; -import { DITYPES } from "../../dependencies/types"; -import { 
IConfigurationService } from "../../configuration"; -import { ILoggerService } from "../LoggerService"; - -export type SubstitutionConfiguration = { - [key: string]: string; -} - -export interface IHostnameSubstitutionService { - substituteHostname(hostname: string): string; -} - -function parseRawConfig(rawConfig: string): SubstitutionConfiguration { - try { - return JSON.parse(Buffer.from(rawConfig, 'base64').toString()); - } catch { - // Fallback to plain JSON - } - try { - return JSON.parse(rawConfig); - } catch(e) { - throw new Error(`Invalid hostname substitution configuration: ${e.message}`); - } -} - -@injectable() -export class HostnameSubstitutionService implements IHostnameSubstitutionService { - _configuration: SubstitutionConfiguration; - _logger: ILoggerService; - - constructor(@inject(DITYPES.ConfigurationService) configurationService: IConfigurationService, - @inject(DITYPES.LoggerService) logger: ILoggerService) { - const config = parseRawConfig(configurationService.get().router.hostnameSubstitutionConfig); - this._configuration = {}; - this._logger = logger; - const logger_context = { - origin: 'HostnameSubstitutionService', - trace_id: "UNDEFINED_TRACE_ID", - } - for (const key in config) { - if(typeof config[key] === "string") { - this._configuration[key] = config[key]; - logger.debug(`Registered suffix ${key}=${config[key]}`, { - ...logger_context, - context: { - key, - value: config[key], - configuration: this._configuration - } - }); - } else { - logger.error('Invalid hostname substitution configuration', { - ...logger_context, - context: { - key, - value: config[key] - } - }); - } - } - - logger.info('Hostname substitution service initialized', { - ...logger_context, - context: { - substitutions: this._configuration - } - }); - } - - substituteHostname(hostname: string): string { - const logger_context = { - origin: 'HostnameSubstitutionService', - trace_id: "UNDEFINED_TRACE_ID", - } - for (const key in this._configuration) { - if (hostname.endsWith(key)) { - const new_hostname = hostname.substring(0, hostname.length - key.length) + this._configuration[key]; - this._logger.debug(`Substituted hostname ${hostname} -> ${new_hostname}`, { - ...logger_context, - context: { - key, - value: this._configuration[key], - hostname, - new_hostname - } - }); - return new_hostname; - } - } - this._logger.debug(`No substitution for hostname ${hostname}`, { - ...logger_context, - context: { - hostname - } - }); - return hostname; - } -} \ No newline at end of file diff --git a/src/services/KuboApiService/index.ts b/src/services/KuboApiService/index.ts deleted file mode 100644 index 8698e23..0000000 --- a/src/services/KuboApiService/index.ts +++ /dev/null @@ -1,101 +0,0 @@ -import { inject, injectable } from "inversify"; -import { DITYPES } from "../../dependencies/types"; -import { ILoggerService } from "../LoggerService"; -import superagent from "superagent"; -import { IConfigurationService } from "../../configuration"; -import { normalizeUrlFragmentForIpfsSubdomainGateway } from "../EnsResolverService/utils"; -import { IRequestContext } from "../lib"; - -export interface IKuboApiService { - resolveIpnsName(request: IRequestContext, name: string): Promise; -} - -@injectable() -export class KuboApiService { - private loggerService: ILoggerService; - private configurationService: IConfigurationService; - private agent: superagent.SuperAgentStatic; - - private getUrl() { - return this.configurationService.get().ipfs.kubo_api_url; - } - - constructor( - @inject(DITYPES.LoggerService) 
loggerService: ILoggerService, - @inject(DITYPES.ConfigurationService) configurationService: IConfigurationService - ) { - this.loggerService = loggerService; - this.configurationService = configurationService; - this.agent = superagent; - const logObject = { - trace_id: "UNKNOWN_TRACE_ID", - origin: "KuboApiService", - context: { - url: this.getUrl() - } - }; - if(!this.getUrl()) { - this.loggerService.info("Kubo API URL is not set, all requests will be ignored.", logObject); - } else { - this.loggerService.info(`Kubo API URL is set`, logObject); - } - } - - async resolveIpnsName(request: IRequestContext, name: string): Promise { - try { - const url = this.getUrl()?.toString(); - if(!url) { - this.loggerService.debug("Superagent is not initialized", { - ...request, - origin: "KuboApiService", - context: { - name, - } - }); - return null; - } - var superagentRequest = this.agent('POST', `${url}api/v0/name/resolve`).query({ - arg: normalizeUrlFragmentForIpfsSubdomainGateway(name.split("ipns://")[1]), - recursive: true, - nocache: true, - "dht-record-count": 16, - "dht-timeout": "1m0s", - stream: false - }); - - if(this.configurationService.get().ipfs.auth) { - superagentRequest = superagentRequest.set("Authorization", `Basic ${this.configurationService.get().ipfs.auth}`); - } - - const response = await superagentRequest.timeout(this.configurationService.get().ipfs.kubo_timeout_ms).ok((_res) => true); - - if(response.error) { - this.loggerService.debug('Failed to resolve IPNS name', - { - ...request, - origin: 'KuboApiService', - context: { - name, - response: response - } - }); - return null; - } else { - return response.body?.Path || null; - } - } catch (error) { - this.loggerService.error( - 'failed to statically resolve IPNS name', - { - ...request, - origin: 'KuboApiService', - context: { - name, - error: error - } - }); - return null; - } - } - -} \ No newline at end of file diff --git a/src/services/LoggerService/index.ts b/src/services/LoggerService/index.ts deleted file mode 100644 index edac415..0000000 --- a/src/services/LoggerService/index.ts +++ /dev/null @@ -1,105 +0,0 @@ -import winston, { createLogger, format, transports } from "winston"; -import { inject, injectable } from "inversify"; -import { DITYPES } from "../../dependencies/types"; -import { IConfigurationService } from "../../configuration"; - -type winstonLogger = ReturnType; -export type ILoggerService = { - error: (message: string, context: ILoggerServiceContext) => void; - warn: (message: string, context: ILoggerServiceContext) => void; - info: (message: string, context: ILoggerServiceContext) => void; - debug: (message: string, context: ILoggerServiceContext) => void; -}; - -export type ILoggerServiceContext = { - origin: string; - trace_id: string; - context?: Object; -} - -@injectable() -export class LoggerService implements ILoggerService { - _configurationService: IConfigurationService; - _logger: winston.Logger; - - constructor(@inject(DITYPES.ConfigurationService) configurationService: IConfigurationService) { - this._configurationService = configurationService; - this._logger = createLogger({ - level: this._configurationService.get().logging.level, - format: format.json(), - defaultMeta: { service: "limo-proxy" }, - }).add( - new transports.Console({ - format: format.json(), - }), - ); - } - - internal_log(severity: 'warn' | 'error' | 'info' | 'debug', msg: string, context: ILoggerServiceContext) { - this._logger.log({ - level: severity, - message: msg, - ...context, - }); - } - - public warn(msg: string, 
context: ILoggerServiceContext) { - this.internal_log('warn', msg, context); - } - public error(msg: string, context: ILoggerServiceContext) { - this.internal_log('error', msg, context); - } - public info(msg: string, context: ILoggerServiceContext) { - this.internal_log('info', msg, context); - } - public debug(msg: string, context: ILoggerServiceContext) { - this.internal_log('debug', msg, context); - } -} - -type TestLoggerServiceEnum = 'warn' | 'error' | 'info' | 'debug'; -type TestLoggerServiceMsg = { - severity: TestLoggerServiceEnum, - message: string, - ctx: any -} - -@injectable() -/** - * This is a test logger service for configurable squelching of logs - * when debugging tests, call logMessages before expect values to interrogate the log stack - * note: the debug configuration should automatically set log level to DEBUG so reusing the logger is fine - */ -export class TestLoggerService implements ILoggerService { - _configurationService: IConfigurationService; - _logger: ILoggerService; - msgs: TestLoggerServiceMsg[] = []; - constructor(@inject(DITYPES.ConfigurationService) configurationService: IConfigurationService) { - this._configurationService = configurationService; - this._logger = new LoggerService(configurationService); - }; - - public warn(msg: string) { - this.msgs.push({severity: 'warn', message: msg, ctx: null}); - } - public error(msg: string, ctx: any) { - this.msgs.push({severity: 'error', message: msg, ctx: ctx}); - } - public info(msg: string) { - this.msgs.push({severity: 'info', message: msg, ctx: null}); - } - public debug(msg: string) { - this.msgs.push({severity: 'debug', message: msg, ctx: null}); - } - public logMessages() { - for(let i = 0; i < this.msgs.length; i++) { - let msg = this.msgs[i]; - this._logger[msg.severity](msg.message, msg.ctx); - } - this.clearMessages() - } - - public clearMessages() { - this.msgs = []; - } -} \ No newline at end of file diff --git a/src/services/NameService/EnsService.ts b/src/services/NameService/EnsService.ts deleted file mode 100644 index bf5526b..0000000 --- a/src/services/NameService/EnsService.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { FallbackProvider, JsonRpcProvider, AbstractProvider } from "ethers"; -import { inject, injectable } from "inversify"; -import { decode, getCodec } from "@ensdomains/content-hash"; -import { ILoggerService } from "../LoggerService"; -import { DITYPES } from "../../dependencies/types"; -import { FallbackProviderConfig } from "ethers/lib.commonjs/providers/provider-fallback"; -import { IConfigurationService } from "../../configuration"; -import { IRequestContext } from "../lib"; -import { ErrorType, INameService } from "."; -import { ErrorSuccess } from "../../utils/ErrorSuccess"; -const getEnsContentHash = async (request: IRequestContext, provider: AbstractProvider, logger: ILoggerService, name: string):Promise => { - const res = await provider.getResolver(name); - if (!res) { - logger.debug('no resolver', { - ...request, - origin: 'getEnsContentHash', - context: { - name: name - } - }); - return null; - } - try { - const contentHash = await res.getContentHash(); - return contentHash - } catch (e) { - if (e?.code === "UNSUPPORTED_OPERATION" && e?.info?.data) { - logger.debug( - 'entering fallback', - { - ...request, - origin: 'getEnsContentHash', - context: { - name: name, - error: e - } - } - ); - const content = decode(e.info.data); - const codec = getCodec(e.info.data); - if (!codec || !content) { - logger.error( - ///`EnsService: unsupported fallback decode operation, codec: 
name: ${name}, codec: ${codec}, content: ${content}`, - 'unsupported fallback decode operation', - { - ...request, - origin: 'getEnsContentHash', - context: { - name: name, - codec: codec, - content: content, - error: e, - } - } - ); - return null; - } - return `${codec}://${content}`; - } else { - throw e; - } - } - } - -@injectable() -export class EnsService implements INameService { - _configurationService: IConfigurationService; - provider: FallbackProvider; - _logger: ILoggerService; - constructor(@inject(DITYPES.ConfigurationService) configurationService: IConfigurationService, @inject(DITYPES.LoggerService) logger: ILoggerService) { - this._configurationService = configurationService;0 - const configuration = this._configurationService.get() - const providers = [new JsonRpcProvider(configuration.ethereum.rpc, undefined, {staticNetwork: true})]; - if (configuration.ethereum.failover_primary) { - logger.info("EnsService: adding failover_primary", - { - trace_id: "UNKNOWN_TRACE_ID", - origin: "EnsService" - }) - providers.push(new JsonRpcProvider(configuration.ethereum.failover_primary, undefined, {staticNetwork: true})); - } - if (configuration.ethereum.failover_secondary) { - logger.info("EnsService: adding failover_secondary", - { - trace_id: "UNKNOWN_TRACE_ID", - origin: "EnsService" - }) - providers.push(new JsonRpcProvider(configuration.ethereum.failover_secondary, undefined, {staticNetwork: true})); - } - const providers_as_config:FallbackProviderConfig[] = providers.map((provider, index) => { - provider._getConnection().timeout = configuration.ethereum.provider_timeout_ms; - return { - provider, - priority: index, - weight: 1, - stallTimeout: configuration.ethereum.provider_stall_timeout_ms, - } - }); - - this.provider = new FallbackProvider(providers_as_config, configuration.ethereum.quorum); - this._logger = logger; - } - - async getContentHash( - request: IRequestContext, - name: string, - ): Promise< - ErrorSuccess< - string | null, - "IEnsServiceError", - ErrorType, - { } - > - > { - const res = await getEnsContentHash(request, this.provider, this._logger, name); - return { - error: false, - result: res, - }; - } -} \ No newline at end of file diff --git a/src/services/NameService/Web3NameSdkService.ts b/src/services/NameService/Web3NameSdkService.ts deleted file mode 100644 index f607f07..0000000 --- a/src/services/NameService/Web3NameSdkService.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { createWeb3Name } from '@web3-name-sdk/core' -import { decode, getCodec } from "@ensdomains/content-hash"; -import { IRequestContext } from '../lib'; -import { ErrorSuccess } from '../../utils/ErrorSuccess'; -import { ILoggerService } from '../LoggerService'; -import { IConfigurationService } from '../../configuration'; -import { ErrorType, INameService } from '.'; -import { inject, injectable } from 'inversify'; -import { DITYPES } from '../../dependencies/types'; - - -@injectable() -export class Web3NameSdkService implements INameService { - _configurationService: IConfigurationService; - _logger: ILoggerService; - //type exposed by imports - _web3name: ReturnType; - - constructor(@inject(DITYPES.ConfigurationService) configurationService: IConfigurationService, @inject(DITYPES.LoggerService) logger: ILoggerService) { - this._configurationService = configurationService; - this._logger = logger; - this._web3name = createWeb3Name({ - isDev: false, - rpcUrl: this._configurationService.get().ethereum.rpc - }) - } - - async getContentHash( - request: IRequestContext, - name: string, - ): 
Promise< - ErrorSuccess< - string | null, - "IEnsServiceError", - ErrorType, - { } - > - > { - const res = await this._web3name.getContentHash({name, rpcUrl: this._configurationService.get().gnosis.rpc}); - if(!res) { - return { - error: false, - result: null, - }; - } - //TODO: this is the same as the fallback in EnsService, should be refactored - const codec = getCodec(res); - const content = decode(res); - if (!codec || !content) { - this._logger.error( - 'unsupported fallback decode operation', - { - ...request, - origin: 'Web3NameSdkService', - context: { - name, - codec, - content, - } - } - ); - return { - error: false, - result: null, - }; - } - const contentHashDecoded = `${codec}://${content}`; - this._logger.debug('getContentHash', { - ...request, - origin: 'Web3NameSdkService', - context: { - name, - contentHash: contentHashDecoded - } - }); - return { - error: false, - result: contentHashDecoded, - }; - } -} \ No newline at end of file diff --git a/src/services/NameService/index.ts b/src/services/NameService/index.ts deleted file mode 100644 index 17d21ae..0000000 --- a/src/services/NameService/index.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { inject, injectable } from "inversify"; -import { ErrorSuccess } from "../../utils/ErrorSuccess"; -import { IRequestContext } from "../lib/index"; -import { ILoggerService } from "../LoggerService"; -import { DITYPES } from "../../dependencies/types"; -export type Tag = "IEnsServiceError"; -export type ErrorType = "error"; - -export interface INameService { - getContentHash( - request: IRequestContext, - name: string, - ): Promise>; -} - -export interface INameServiceFactory { - getNameServiceForDomain(request: IRequestContext, domain: string): INameService; -} - - -//FIXME: this isn't how inversify expects factories, but it doesn't really matter -@injectable() -export class NameServiceFactory implements INameServiceFactory { - _logger: ILoggerService; - _ensService: INameService; - _web3NameSdkService: INameService; - - constructor( - @inject(DITYPES.LoggerService) logger: ILoggerService, - @inject(DITYPES.EnsService) ensService: INameService, - @inject(DITYPES.Web3NameSdkService) web3NameSdkService: INameService, - ) { - this._logger = logger; - this._ensService = ensService; - this._web3NameSdkService = web3NameSdkService; - } - - getNameServiceForDomain(request: IRequestContext, domain: string): INameService { - if(domain.endsWith(".gno")) { - this._logger.debug("Using Web3NameSdkService for domain " + domain, { - ...request, - origin: "NameServiceFactory", - }); - return this._web3NameSdkService; - } - this._logger.debug("Using EnsService for domain " + domain, { - ...request, - origin: "NameServiceFactory", - }); - return this._ensService; - } -} \ No newline at end of file diff --git a/src/test/TestCaseGenerator.ts b/src/test/TestCaseGenerator.ts deleted file mode 100644 index 2a1a954..0000000 --- a/src/test/TestCaseGenerator.ts +++ /dev/null @@ -1,72 +0,0 @@ -type PossibleTypes = boolean | string; - -type InputObject = { [key: string]: PossibleTypes[] }; - -export function cartesianProduct(input: InputObject):Array<{ [x: string]: PossibleTypes; }> { - const keys = Object.keys(input); - const result = keys.reduce((product, key) => { - const newProduct:any[] = []; - for (const obj of product) { - for (const value of input[key]) { - newProduct.push({ ...obj, [key]: value }); - } - } - return newProduct; - }, [{}]); - return result; -} - -function excludeProperties(obj: T, keys: K[]): Omit { - let result = { ...obj }; - 
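  // drop each excluded key from the shallow copy; the caller's object is left untouched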
keys.forEach(key => delete result[key]); - return result; -} - -export class TestRunner { - - _testCases: (T&{options: O})[]; - runners: {name: string, run: (thisvar: Mocha.Suite) => () => void}[] = []; - - constructor(private testCases: (T&{options: O})[]) { - this._testCases = testCases; - } - - registerTests(description: string, inputToLog: K[], optionsToExclude: X[], filterFunction: (testCase: T & {options: Omit}) => boolean, func: (testCase: T & {options: Omit}) => Promise): void { - const cases = this._testCases.map((testCase: T&{options: O}) => { - const options = JSON.parse(JSON.stringify(testCase.options)) as typeof testCase.options; - const newOptions = excludeProperties(options, optionsToExclude) - const ret: T & {options: Omit} = { - ...testCase, - options: newOptions - }; - return ret - }).filter((testCase, index, self) => - index === self.findIndex((t) => (JSON.stringify(t) === JSON.stringify(testCase)))).flatMap((x) => x).filter(filterFunction); - - const runner = cases.map((testCase) => { - const nameBuilder:string[] = [] - Object.keys(testCase).forEach((key) => { - if(inputToLog.includes(key as K)) { - nameBuilder.push(`${key}: ${JSON.stringify((testCase as unknown as any)[key])}`) - } - }); - Object.keys(testCase.options).forEach((key) => { - nameBuilder.push(`${key}: ${JSON.stringify((testCase.options as unknown as any)[key])}`) - }); - const name = description +' ('+nameBuilder.join(", ")+')'; - return { - name, - run: (thisvar: Mocha.Suite) => func.bind(thisvar, testCase) - } - }) - this.runners.push(...runner) - } - - runTests(thisvar: Mocha.Suite): void { - let runner = this.runners.shift(); - while(runner) { - it(runner.name, runner.run(thisvar)); - runner = this.runners.shift(); - } - } -} \ No newline at end of file diff --git a/src/test/TestResolverService.ts b/src/test/TestResolverService.ts deleted file mode 100644 index c8084c8..0000000 --- a/src/test/TestResolverService.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { ErrorSuccess } from "../utils/ErrorSuccess"; -import { injectable } from "inversify"; -import { INameService, Tag, ErrorType } from "../services/NameService"; -import { IArweaveResolver } from "../services/EnsResolverService/arweave"; -import { IRequestContext } from "../services/lib"; - -@injectable() -export class TestResolverService implements INameService, IArweaveResolver { - mappings = new Map(); - //TODO: this is a hack, scripts/dump_test_cases.ts should run arweave cases through resolveArweave as an extra parameter - resolveArweave: (request: IRequestContext, tx_id: string, ens_name: string) => Promise = async (request: IRequestContext, tx_id: string, ens_name: string) => { - const res = this.mappings.get(tx_id); - if (res === undefined) { - throw new Error(`TestResolverService: no mapping for ${ens_name}`); - } - if (typeof res === "string") { - return res.startsWith("arweave://") ? 
res.substring("arweave://".length) : res; - } - if (!res || res.error) { - throw new Error(res?.reason); - } - throw new Error("TestResolverService: invalid mapping"); - } - - getContentHash( - request: IRequestContext, - name: string - ): Promise> { - const res = this.mappings.get(name); - if (res === undefined) { - throw new Error(`TestResolverService: no mapping for ${name}`); - } - if (!res) { - return Promise.resolve({ - error: false, - result: null, - }); - } - if (typeof res === "string") { - return Promise.resolve({ - error: false, - result: res, - }); - } - if (res.error === "throws") { - throw new Error(res.reason); - } - return Promise.resolve({ - error: true, - reason: res.reason, - _tag: "IEnsServiceError", - _type: "error", - }); - } - - set(name: string, value: string | null | { error: true | 'throws'; reason: string; }) { - this.mappings.set(name, value); - } -} diff --git a/src/test/helper/index.ts b/src/test/helper/index.ts deleted file mode 100644 index be51e73..0000000 --- a/src/test/helper/index.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { Container } from "inversify"; -import { DITYPES } from "../../dependencies/types"; -import { INameService } from "../../services/NameService"; -import { TestResolverService } from "../TestResolverService"; -import { IDomainQuerySuperagentService, TestDomainQuerySuperagentService } from "../../services/DomainsQueryService"; -import { IRedisClient, TestLaggyRedisClientProxy, TestRedisClient } from "../../services/CacheService"; -import { IConfigurationService, TestConfigurationService } from "../../configuration"; -import { IArweaveResolver } from "../../services/EnsResolverService/arweave"; -import { createApplicationConfigurationBindingsManager } from "../../dependencies/inversify.config"; -import { EnvironmentConfiguration } from "../../dependencies/BindingsManager"; -import { IHostnameSubstitutionService } from "../../services/HostnameSubstitutionService"; - -export type HarnessType = { - AppContainer: Container; - testEnsService: TestResolverService; - testRedisClient: TestRedisClient; - testArweaveResolverService: TestResolverService; - testDomainQuerySuperagentService: TestDomainQuerySuperagentService; - testConfigurationService: TestConfigurationService; - hostnameSubstitionService: IHostnameSubstitutionService; - web3NameSdkService: TestResolverService; -}; - -export const buildAppContainer = ():HarnessType => { - const bindingsManager = createApplicationConfigurationBindingsManager(); - const AppContainer = bindingsManager.bindAll(EnvironmentConfiguration.Development).container; - return { - AppContainer, - testEnsService: AppContainer.get(DITYPES.EnsService) as TestResolverService, - testRedisClient: AppContainer.get(DITYPES.RedisClient) as TestRedisClient, - testArweaveResolverService: AppContainer.get(DITYPES.ArweaveResolver) as TestResolverService, - testDomainQuerySuperagentService: AppContainer.get(DITYPES.DomainQuerySuperagentService) as TestDomainQuerySuperagentService, - testConfigurationService: AppContainer.get(DITYPES.ConfigurationService) as TestConfigurationService, - hostnameSubstitionService: AppContainer.get(DITYPES.HostnameSubstitutionService), - web3NameSdkService: AppContainer.get(DITYPES.Web3NameSdkService) as TestResolverService, - }; -}; \ No newline at end of file diff --git a/src/test/integration.spec.ts b/src/test/integration.spec.ts deleted file mode 100644 index a2891aa..0000000 --- a/src/test/integration.spec.ts +++ /dev/null @@ -1,714 +0,0 @@ -import "reflect-metadata" -import { describe } from 
'mocha'; -import { expect } from 'chai'; -// @ts-ignore //bug with parsing the type assertion -import cases from './cases.json' assert { type: "json" }; -import {Server as ProxyServer} from '../server/index' -import {HarnessType, buildAppContainer} from "./helper/index"; -import {RequestMethod, RequestOptions, createRequest, createResponse} from "node-mocks-http" -import { getPeerId } from "../services/EnsResolverService"; -import { DITYPES } from "../dependencies/types"; -import { TestRunner, cartesianProduct } from "./TestCaseGenerator"; -import EventEmitter from "events"; -import { Request, Response } from "express"; -import { ParamsDictionary } from "express-serve-static-core"; -import { ParsedQs } from "qs"; -import { TestLoggerService } from "../services/LoggerService"; -import { normalizeUrlFragmentForIpfsSubdomainGateway } from "../services/EnsResolverService/utils"; -import { TestLaggyRedisClientProxy } from "../services/CacheService"; -import { IRequestContext } from "../services/lib"; -import { TestResolverService } from "./TestResolverService"; - -type TestCaseType = { - name: string, - type: "ipfs" | "ipns" | "arweave" | "swarm" | "none", - contentHash?: string, - additionalInfo: Partial<{ - arweave: { - result: string, - query: string, - subdomain_sandbox_id: string, - }; - }> -} - -interface Options { - subdomainSupport: boolean, - ensSocialsRedirect: boolean, - blacklisted: boolean | 'throws', - dohQueryType: "A" | "AAAA" | "CNAME" | "TXT", - ensError: boolean | 'throws', - redisIsLaggy: boolean, -} -const possibleOptions:Options[] = cartesianProduct({subdomainSupport: [true, false], ensSocialsRedirect: [true, false], blacklisted: [true, false, 'throws'], dohQueryType: ["A", "AAAA", "CNAME", "TXT"], ensError: [false, 'throws'], redisIsLaggy: [false, true]}) as any as Options[]; -var testCases = (cases as TestCaseType[]).map((testCase) => { - return possibleOptions.map((options) => { - return { - ...testCase, - options - } - }) -}).flatMap((x) => x); -const gen = new TestRunner(testCases); - -type HarnessProxyServerPayloadType = { - proxyServer: ProxyServer; -} | { - caddyServer: ProxyServer; -} | { - dohServerGetRequest: ProxyServer; -} - -function isProxyServerPayloadType(payload: any): payload is {proxyServer: ProxyServer} { - return payload.proxyServer !== undefined; -} - -function isCaddyServerPayloadType(payload: any): payload is {caddyServer: ProxyServer} { - return payload.caddyServer !== undefined; -} - -function isDohServerGetPayloadType(payload: any): payload is {dohServer: ProxyServer} { - return payload.dohServerGetRequest !== undefined; -} - -type HarnessPayloadType = HarnessProxyServerPayloadType; - -const harness = (harnessInput: HarnessType) => (payload: HarnessPayloadType) => - async (v:TestCaseType&{options: Options}) => - { - var {type, contentHash, additionalInfo, options} = v; - - if(options.redisIsLaggy) { - harnessInput.testRedisClient.setProxy(new TestLaggyRedisClientProxy()); - } - - harnessInput.testConfigurationService.set((conf) => { - conf.ipfs.subdomainSupport = options.subdomainSupport; - conf.ens.socialsEndpointEnabled = options.ensSocialsRedirect; - }) - /* - the distinction between these two variables is important: - - nameResolvedToEnsName is the name that the query logic will (should) see - - nameFromHostMayReferToSubdomainOfHostedProxyAddress is the name that the client will send to the server - -- this means that it CAN be a .eth domain, or it can be a proxied domain such as "vitalik.eth.limo" - -- the underlying assumption is that the 
server should be agnostic to the difference between these two - -- the server should only care about the underlying ENS name - -- if there's an underlying bug in the public facing interfaces of the server, that bug will end up passing the wrong name to either the query services or otherwise not respect the actual .eth ENS name - - for most of the test cases, nameResolvedToEnsName === nameFromHostMayReferToSubdomainOfHostedProxyAddress - */ - - const nameResolvedToEnsName = harnessInput.hostnameSubstitionService.substituteHostname(v.name); - const nameFromHostMayReferToSubdomainOfHostedProxyAddress = v.name; - - var testEnsEnsServiceExpectedValue : string | null | {error: true | 'throws', reason: string} | undefined = contentHash; - if(options.ensError) { - testEnsEnsServiceExpectedValue = {error: options.ensError, reason: "test"}; - - } - - const resolvers = [harnessInput.testEnsService, harnessInput.web3NameSdkService]; - - var theRealTestResolverService: TestResolverService; - - if(nameResolvedToEnsName.endsWith("eth")) { - theRealTestResolverService = harnessInput.testEnsService; - } else if (nameResolvedToEnsName.endsWith("gno")) { - theRealTestResolverService = harnessInput.web3NameSdkService; - } else { - throw "Test case non-totality error" - } - - if(testEnsEnsServiceExpectedValue !== undefined) { - theRealTestResolverService.set(nameResolvedToEnsName, testEnsEnsServiceExpectedValue); - } - - //poison the other resolvers to ensure our factory selects the correct one - resolvers.filter((resolver) => resolver !== theRealTestResolverService).forEach((resolver) => { - if(testEnsEnsServiceExpectedValue === undefined) { - resolver.set(nameResolvedToEnsName, "ASDFASDDFASDHDAHD bad value"); - } else { - //implicit poisoning, undefined is the default - } - }); - - if(additionalInfo.arweave) { - harnessInput.testArweaveResolverService.set(additionalInfo.arweave.query, additionalInfo.arweave.result); - } - if(options.blacklisted) { - if(options.blacklisted === 'throws') { - //if the service errors, we want to be unavailable - harnessInput.testDomainQuerySuperagentService.error = true; - } else { - harnessInput.testDomainQuerySuperagentService.setBlacklist(nameResolvedToEnsName, true); - } - } - const request = createRequestEnsureTotality(payload, nameFromHostMayReferToSubdomainOfHostedProxyAddress, options); - const req = createRequest(request); - const res = createResponse({ - eventEmitter: EventEmitter - }); - var busyWaiting = true; - res.on('end', () => { - busyWaiting = false; - }); - await callPayloadEnsureTotality(payload, req, res); - while(busyWaiting) { - await new Promise((resolve) => setTimeout(resolve, 10)); - } - const _result = res._getData(); - if(!res._isEndCalled()) { - throw "Response not ended"; - } - const content_location = res.getHeader("x-content-location"); - const content_path = res.getHeader("x-content-path"); - const content_storage_type = res.getHeader("x-content-storage-type") - - return { - _result, - res, - content_location, - content_path, - content_storage_type - } -} - -describe('Proxy API Integration Tests', function () { - var harnessInput: HarnessType; - var server: ProxyServer; - var commonSetup: any; //not even the language server can figure out what this is - - beforeEach(() => { - let r = buildAppContainer(); - r.AppContainer.bind(ProxyServer).to(ProxyServer).inSingletonScope(); - server = r.AppContainer.get(ProxyServer); - harnessInput = r; - commonSetup = harness(harnessInput)({proxyServer: server}) - }); - - afterEach(() => { - 
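    // tear down the per-test container and null out every handle so no state (for example the
    // ask rate limiter discussed in TestConfigurationService) can leak between test cases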
harnessInput.AppContainer.unbindAll(); - harnessInput = null as any; - server = null as any; - commonSetup = null as any; - }); - - gen.registerTests("normal blacklist behavior", ["name", "type"], ["ensSocialsRedirect", "dohQueryType"], function(testCase) { - return testCase.options.blacklisted === true || testCase.options.blacklisted === 'throws'; - }, async function(testCase) { - var {type, name, contentHash, options} = testCase - - const { _result, res } = await commonSetup(testCase); - - if(options.blacklisted === 'throws') { - var expectedResponseCode = 200; - if(options.ensError) { - expectedResponseCode = 500; - } else if(testCase.type === "none" && !options.ensSocialsRedirect) { - expectedResponseCode = 404; - } - expect(res.statusCode).to.be.equal(expectedResponseCode); - } else { - expect(res.statusCode).to.be.equal(451); - } - }); - - gen.registerTests("subdomain blacklist behavior", ["name", "type"], ["ensSocialsRedirect", "dohQueryType"], function(testCase) { - return testCase.options.blacklisted === true || testCase.options.blacklisted === 'throws'; - }, async function(originalTestCase) { - const subdomains = ["asdf", "www", "a.b.c", "asdf.gsdh"]; - for (const subdomain of subdomains) { - - var testCase = {...originalTestCase}; - testCase.name = subdomain + "." + originalTestCase.name; - if(originalTestCase.options.blacklisted === 'throws') { - //if the service errors, we want to be unavailable - harnessInput.testDomainQuerySuperagentService.error = true; - } else { - harnessInput.testDomainQuerySuperagentService.setBlacklist(harnessInput.hostnameSubstitionService.substituteHostname(originalTestCase.name), true); - } - const fudge = JSON.parse(JSON.stringify(testCase)); - fudge.options.blacklisted = false; //we don't want the subdomain blacklisted, just the original domain - - const { _result, res } = await commonSetup(fudge); - - var {options} = testCase - if(options.blacklisted === 'throws') { - var expectedResponseCode = 200; - if(options.ensError) { - expectedResponseCode = 500; - } else if(testCase.type === "none" && !options.ensSocialsRedirect) { - expectedResponseCode = 404; - } - expect(res.statusCode).to.be.equal(expectedResponseCode, `subdomain: ${subdomain}`); - } else { - - expect(res.statusCode).to.be.gt(399, `subdomain: ${subdomain}`); - } - } - }); - - gen.registerTests("x-content-location and x-content-path", ["name", "type"],["ensSocialsRedirect", "dohQueryType"], function(testCase) { - return testCase.type === "ipfs" || testCase.type === "ipns"; - },async function(testCase) { - var {type, name, contentHash, options} = testCase - const { _result, content_location, content_path, content_storage_type, res } = await commonSetup(testCase); - if(options.blacklisted === true) { - expect(res.statusCode).to.be.equal(451); - expect(content_path).to.be.undefined; - expect(content_location).to.be.undefined; - expect(content_storage_type).to.be.undefined; - return; - } - - if(options.ensError) { - expect(res.statusCode).to.be.equal(500); - expect(content_path).to.be.undefined; - expect(content_location).to.be.undefined; - expect(content_storage_type).to.be.undefined; - return; - } - - const request = { - trace_id: "TEST_TRACE_ID", - } - - contentHash = recalculateIpnsContentHash(request, type, contentHash, harnessInput, name); - if (options.subdomainSupport) { - expect(content_path).to.be.equal(`/`); - let fragment = contentHash?.substring(7); - //see the en.wikipedia-on-ipfs.org testcase - if(type === "ipns") { - fragment = fragment && 
normalizeUrlFragmentForIpfsSubdomainGateway(fragment); - } - expect(content_location).to.be.equal(`${fragment}.${type}.ipfs`); - } else { - expect(content_path).to.be.equal(`/${type}/${contentHash?.substring(7)}/`) - expect(content_location).to.be.equal("ipfs") - } - expect(content_storage_type).to.be.equal(getCodecFromType(testCase.type as any)); - }); - gen.registerTests("x-content-location and x-content-path", ["name", "type"],["ensSocialsRedirect", "subdomainSupport", "dohQueryType"], function(testCase) { - return testCase.type === "arweave"; - },async function(testCase) { - var {type, name, contentHash, additionalInfo, options} = testCase - const { _result, content_location, content_path, content_storage_type, res } = await commonSetup(testCase); - if(options.blacklisted === true) { - expect(res.statusCode).to.be.equal(451); - expect(content_path).to.be.undefined; - expect(content_location).to.be.undefined; - expect(content_storage_type).to.be.undefined; - return; - } - - if(options.ensError) { - expect(res.statusCode).to.be.equal(500); - expect(content_path).to.be.undefined; - expect(content_location).to.be.undefined; - expect(content_storage_type).to.be.undefined; - return; - } - - expect(res.statusCode).to.be.equal(200); - const ar_id = contentHash?.substring('arweave://'.length); - expect(content_path).to.be.equal('/' + ar_id + '/'); - expect(additionalInfo.arweave?.subdomain_sandbox_id).to.not.be.undefined; //this would be a bad test case if it was - expect(content_location).to.be.equal(`${additionalInfo.arweave?.subdomain_sandbox_id}.arweave`); - expect(content_storage_type).to.be.equal(getCodecFromType(testCase.type as any)); - }); - gen.registerTests("x-content-location and x-content-path", ["name", "type"],["ensSocialsRedirect", "subdomainSupport", "dohQueryType"], function(testCase) { - return testCase.type === "swarm"; - },async function(testCase) { - var {type, name, contentHash, options} = testCase - const { _result, content_location, content_path, content_storage_type, res } = await commonSetup(testCase); - if(options.blacklisted === true) { - expect(res.statusCode).to.be.equal(451); - expect(content_path).to.be.undefined; - expect(content_location).to.be.undefined; - expect(content_storage_type).to.be.undefined; - return; - } - - if(options.ensError) { - expect(res.statusCode).to.be.equal(500); - expect(content_path).to.be.undefined; - expect(content_location).to.be.undefined; - expect(content_storage_type).to.be.undefined; - return; - } - - expect(res.statusCode).to.be.equal(200); - expect(content_path).to.be.equal('/bzz/' + contentHash?.substring('bzz://'.length) + '/'); - expect(content_location).to.be.equal("swarm"); - expect(content_storage_type).to.be.equal(getCodecFromType(testCase.type as any)); - }); - gen.registerTests("x-content-location and x-content-path", ["name", "type"],["subdomainSupport", "blacklisted", "dohQueryType"], function(testCase) { - return testCase.type === "none"; - },async function(testCase) { - var {type, name, contentHash, options} = testCase - const { _result, content_location, content_path, content_storage_type, res } = await commonSetup(testCase); - - if(options.ensError) { - expect(res.statusCode).to.be.equal(500); - expect(content_path).to.be.undefined; - expect(content_location).to.be.undefined; - expect(content_storage_type).to.be.undefined; - return; - } - - if(options.ensSocialsRedirect) { - expect(res.statusCode).to.be.equal(200); - expect(content_path).to.be.equal(`/?name=${name}/`); - 
expect(content_location).to.be.equal("socials.com"); - } else { - expect(res.statusCode).to.be.equal(404); - expect(content_path).to.be.undefined; - expect(content_location).to.be.undefined; - } - - expect(content_storage_type).to.be.undefined; - }); - - gen.runTests(this); -}); - -describe('Caddy API Integration Tests', function () { - var harnessInput: HarnessType; - var server: ProxyServer; - var commonSetup: any; //not even the language server can figure out what this is - - beforeEach(() => { - let r = buildAppContainer(); - r.AppContainer.bind(ProxyServer).to(ProxyServer).inSingletonScope(); - server = r.AppContainer.get(ProxyServer); - harnessInput = r; - commonSetup = harness(harnessInput)({caddyServer: server}) - }); - - afterEach(() => { - harnessInput.AppContainer.unbindAll(); - harnessInput = null as any; - server = null as any; - commonSetup = null as any; - }); - - gen.registerTests("normal blacklist behavior", ["name", "type"], ["ensSocialsRedirect", "dohQueryType"], function(testCase) { - return testCase.options.blacklisted === true || testCase.options.blacklisted === 'throws'; - }, async function(testCase) { - var {type, name, contentHash, options} = testCase - - const { _result, res } = await commonSetup(testCase); - - if(options.blacklisted === 'throws') { - var expectedResponseCode = 200; - if(options.ensError) { - expectedResponseCode = 500; - } else if(testCase.type === "none" && !options.ensSocialsRedirect) { - expectedResponseCode = 404; - } - expect(res.statusCode).to.be.equal(expectedResponseCode); - } else { - expect(res.statusCode).to.be.equal(451); - } - }); - - gen.registerTests("subdomain blacklist behavior", ["name", "type"], ["ensSocialsRedirect", "dohQueryType", "subdomainSupport"], function(testCase) { - return testCase.options.blacklisted === true || testCase.options.blacklisted === 'throws' && testCase.type !== "none"; - }, async function(originalTestCase) { - const subdomains = ["asdf", "www", "a.b.c", "asdf.gsdh"]; - for (const subdomain of subdomains) { - - var testCase = JSON.parse(JSON.stringify(originalTestCase)); - testCase.name = subdomain + "." 
+ originalTestCase.name; - if(originalTestCase.options.blacklisted === 'throws') { - //if the service errors, we want to be unavailable - harnessInput.testDomainQuerySuperagentService.error = true; - } else { - harnessInput.testDomainQuerySuperagentService.setBlacklist(harnessInput.hostnameSubstitionService.substituteHostname(originalTestCase.name), true); - } - const fudge = JSON.parse(JSON.stringify(testCase)); - - fudge.options.blacklisted = false; //we don't want the subdomain blacklisted, just the original domain - - const { _result, res } = await commonSetup(fudge); - - var {options} = testCase - if(options.blacklisted === "throws" && options.ensError !== "throws") { - expect(res.statusCode).to.be.equal(200); - return; - } - - if(res.statusCode === 200) { - (harnessInput.AppContainer.get(DITYPES.LoggerService) as any).logMessages() - } - - expect(res.statusCode).to.be.greaterThan(399); - } - }); - - gen.registerTests("permutation", ["name", "type"],["subdomainSupport", "dohQueryType"], function(testCase) { - return testCase.type === "ipfs" || testCase.type === "ipns" || testCase.type === "arweave" || testCase.type === "swarm"; - },async function(testCase) { - var {type, name, contentHash, options} = testCase - const { res } = await commonSetup(testCase); - if(options.ensError && options.blacklisted !== true) { - expect(res.statusCode).to.be.equal(500); - return; - } - if(options.blacklisted) { - if(options.blacklisted === 'throws') { - expect(res.statusCode).to.be.equal(200); - } else { - expect(res.statusCode).to.be.equal(451); - } - return; - } - - if(options.ensError) { - expect(res.statusCode).to.be.equal(500); - return; - } - - expect(res.statusCode).to.be.equal(200); - }); - - gen.registerTests("permutation", ["name", "type"],["subdomainSupport", "dohQueryType"], function(testCase) { - return testCase.type === "none"; - },async function(testCase) { - var {type, name, contentHash, options} = testCase - const { _result, content_location, content_path, res } = await commonSetup(testCase); - if(options.ensError && options.blacklisted !== true) { - (harnessInput.AppContainer.get(DITYPES.LoggerService) as TestLoggerService).logMessages() - expect(res.statusCode).to.be.equal(500); - return; - } - if(options.blacklisted === true) { - expect(res.statusCode).to.be.equal(451); - return; - } - - if(options.ensError) { - expect(res.statusCode).to.be.equal(500); - return; - } - - if(options.ensSocialsRedirect) { - expect(res.statusCode).to.be.equal(200); - } else { - expect(res.statusCode).to.be.equal(404); - } - }); - - gen.runTests(this); -}); - - -describe('DoH GET API Integration Tests', function () { - var harnessInput: HarnessType; - var server: ProxyServer; - var commonSetup: any; //not even the language server can figure out what this is - - beforeEach(() => { - let r = buildAppContainer(); - r.AppContainer.bind(ProxyServer).to(ProxyServer).inSingletonScope(); - server = r.AppContainer.get(ProxyServer); - harnessInput = r; - commonSetup = harness(harnessInput)({dohServerGetRequest: server}) - }); - - afterEach(() => { - harnessInput.AppContainer.unbindAll(); - harnessInput = null as any; - server = null as any; - commonSetup = null as any; - }); - - function handleBlacklistBehaviorTest(testCase: TestCaseType&{options: Options}, res: Response, result: any) { - if(testCase.options.blacklisted === 'throws') { - expect(res.statusCode).to.be.equal(200); - } else { - if(res.statusCode === 451) { - //FIXME: this 451 clause shouldn't exist, this is just to describe current behavior - 
expect(res.statusCode).to.be.equal(451); - return; - } else { - expect(res.statusCode).to.be.equal(200); - const payload = JSON.parse(result as string); - expect(Math.abs(payload.Status)).to.be.equal(0); - expect(payload.Answer).to.be.be.instanceOf(Array); - expect(payload.Answer).to.be.empty; - } - } - } - - gen.registerTests("normal blacklist behavior", ["name", "type"], ["ensSocialsRedirect", "ensError", "subdomainSupport"], function(testCase) { - return testCase.options.blacklisted === true || testCase.options.blacklisted === 'throws'; - }, async function(testCase) { - const { _result, res } = await commonSetup(testCase); - handleBlacklistBehaviorTest(testCase, res, _result); - }); - - gen.registerTests("subdomain blacklist behavior", ["name", "type"], ["ensSocialsRedirect", "ensError", "subdomainSupport"], function(testCase) { - return testCase.options.blacklisted === true || testCase.options.blacklisted === 'throws'; - }, async function(originalTestCase) { - const subdomains = ["asdf", "www", "a.b.c", "asdf.gsdh"]; - for (const subdomain of subdomains) { - - var testCase = {...originalTestCase}; - testCase.name = subdomain + "." + originalTestCase.name; - if(originalTestCase.options.blacklisted === 'throws') { - //if the service errors, we want to be unavailable - harnessInput.testDomainQuerySuperagentService.error = true; - } else { - harnessInput.testDomainQuerySuperagentService.setBlacklist(originalTestCase.name, true); - } - const fudge = {...testCase}; - fudge.options = {...(testCase.options), blacklisted: false}; //we don't want the subdomain blacklisted, just the original domain - - const { _result, res } = await commonSetup(fudge); - handleBlacklistBehaviorTest(testCase, res, _result); - } - }); - - gen.registerTests("permutation", ["name", "type"],["ensSocialsRedirect", "subdomainSupport"], function(testCase) { - return true; - },async function(testCase) { - var {options, type} = testCase - const { res, _result } = await commonSetup(testCase); - const request = { - trace_id: "TEST_TRACE_ID", - } - const contentHash = recalculateIpnsContentHash(request, type, testCase.contentHash, harnessInput, testCase.name); - - /* - DoH should *not* respect the server being hosted at an endpoint, it is for raw ENS queries only - */ - if(testCase.name.endsWith("local")) { - expect(res.statusCode).to.be.equal(200); - const ret = JSON.parse(_result); - expect(Math.abs(ret.Status)).to.be.equal(0); - expect(ret.Answer).to.be.be.instanceOf(Array); - expect(ret.Answer).to.be.empty; - return; - } - - if(options.blacklisted === true) { - expect(res.statusCode).to.be.equal(451); - return; - } - - if(options.ensError && options.dohQueryType === "TXT") { - expect(res.statusCode).to.be.equal(200); - const ret = JSON.parse(_result); - expect(Math.abs(ret.Status)).to.be.equal(2); - return; - } - - const result = JSON.parse(_result as string); - expect(res.statusCode).to.be.equal(200); - expect(Math.abs(result.Status)).to.be.equal(0); - expect(result.Answer).to.be.be.instanceOf(Array); - if(options.dohQueryType === "TXT") { - if(type === "none") { - expect(result.Answer).to.be.empty; - return; - } - expect(result.Answer).to.not.be.empty; - const the_result = result.Answer[0]; - expect(the_result.type).to.be.equal(16); - expect(the_result.name).to.be.equal(harnessInput.hostnameSubstitionService.substituteHostname(testCase.name)); - const prefix = type === "arweave" ? 
`ar://` : `/${getDnslinkPrefixFromType(type)}/`;
-        const dnslink_string = `dnslink=${prefix}${contentHash?.substring(contentHash.indexOf("://") + 3)}`;
-        expect(the_result.data).to.be.equal(dnslink_string);
-        //if the default test configuration service was changed, this should be too
-        expect(the_result.ttl).to.be.equal(69);
-      } else {
-        expect(result.Answer).to.be.empty;
-      }
-  });
-
-  gen.runTests(this);
-});
-
-function getCodecFromType(type: "ipfs" | "ipns" | "arweave" | "swarm"):string {
-  if(type === "ipfs") {
-    return "ipfs-ns";
-  } else if(type === "ipns") {
-    return "ipns-ns";
-  } else if(type === "arweave") {
-    return "arweave-ns";
-  } else if(type === "swarm") {
-    return "swarm";
-  }
-  return type as never
-}
-
-function recalculateIpnsContentHash(request: IRequestContext, type: string, contentHash: string | undefined, harnessInput: HarnessType, name: string) {
-  if (type === "ipns" && contentHash) {
-    const peerId = getPeerId(request, harnessInput.AppContainer.get(DITYPES.LoggerService), contentHash.substring(7), name) || "THIS_SHOULD_NOT_BE_NULL";
-    return "ipns://" + peerId;
-  }
-  return contentHash;
-}
-
-function getDnslinkPrefixFromType(type: "ipfs" | "ipns" | "arweave" | "swarm"):string {
-  if(type === "ipfs") {
-    return "ipfs";
-  } else if(type === "ipns") {
-    return "ipns";
-  } else if(type === "arweave") {
-    return "ar";
-  } else if(type === "swarm") {
-    return "bzz";
-  }
-  return type as never
-}
-
-function createRequestEnsureTotality(payload: HarnessProxyServerPayloadType, name: string, options:Options):RequestOptions {
-  if (isProxyServerPayloadType(payload)) {
-    return {
-      method: 'GET' as RequestMethod,
-      url: "localhost",
-      headers: {
-        'Host': name,
-      },
-    };
-  } else if (isCaddyServerPayloadType(payload)) {
-    return {
-      method: 'GET' as RequestMethod,
-      url: `http://localhost`,
-      headers: {
-        'Host': "localhost",
-      },
-      query: {
-        domain: name
-      }
-    };
-  } else if (isDohServerGetPayloadType(payload)) {
-    return {
-      method: 'GET' as RequestMethod,
-      url: `http://localhost`,
-      headers: {
-        'Host': "localhost",
-      },
-      query: {
-        name,
-        type: options.dohQueryType
-      }
-    };
-  } else {
-    return payload as never;
-  }
-}
-
-async function callPayloadEnsureTotality(payload: HarnessProxyServerPayloadType, req: Request, res: Response):Promise<void> {
-  if (isProxyServerPayloadType(payload)) {
-    await payload.proxyServer.proxyServer(req, res);
-  } else if (isCaddyServerPayloadType(payload)) {
-    await payload.caddyServer.caddy(req, res);
-  } else if (isDohServerGetPayloadType(payload)) {
-    await payload.dohServerGetRequest._DnsQuery.dnsqueryGet(req, res);
-  } else {
-    return payload as never;
-  }
-}
diff --git a/src/utils/ErrorSuccess/index.ts b/src/utils/ErrorSuccess/index.ts
deleted file mode 100644
index 016d0da..0000000
--- a/src/utils/ErrorSuccess/index.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-export type Error<Tag, Type> = {
-  error: true;
-  _tag: Tag;
-  _type: Type;
-  reason: string;
-};
-
-export type Success<Result> = {
-  error: false;
-  result: Result;
-};
-
-export type ErrorSuccess<Tag, Type, Result, ErrorContext> =
-  | (Error<Tag, Type> & ErrorContext)
-  | Success<Result>;
diff --git a/src/utils/index.ts b/src/utils/index.ts
deleted file mode 100644
index 5f18104..0000000
--- a/src/utils/index.ts
+++ /dev/null
@@ -1,38 +0,0 @@
-import { Request } from "express";
-import { punycodeDomainPartsToUnicode } from "./punycodeConverter";
-import {IHostnameSubstitutionService } from "../services/HostnameSubstitutionService";
-import { VALID_ENS_TLDS } from "../configuration";
-
-export function getDomainOfRequestFromGet(hostnameSubstitutionService: 
IHostnameSubstitutionService, req: Request, param = "domain") { - let domain = req.query[param]; - if (typeof domain !== "string") { - return null; - } - - domain = hostnameSubstitutionService.substituteHostname(domain); - - domain = punycodeDomainPartsToUnicode(domain); - if (hostnameIsENSTLD(domain)) { - return domain; - } else { - return null; - } -} - -export function hostnameIsENSTLD(hostname: string) { - return VALID_ENS_TLDS.find((tld) => hostname.endsWith("."+tld)) !== undefined; -} - -export const ensureTrailingSlash = (path: string) => { - if (path.endsWith("/")) { - return path; - } else { - return path + "/"; - } -}; - -export function getTraceIdFromRequest(req: Request) { - const trace_id_header = 'x-limo-id'; - const trace_id = typeof req.headers[trace_id_header] === "string" ? req.headers[trace_id_header] : "UNDEFINED_TRACE_ID"; - return trace_id; -} \ No newline at end of file diff --git a/tsconfig.base.json b/tsconfig.base.json new file mode 100644 index 0000000..711613a --- /dev/null +++ b/tsconfig.base.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "declaration": true, + "declarationMap": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "importHelpers": true, + "moduleResolution": "NodeNext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitAny": true, + "noImplicitReturns": true, + "noImplicitThis": true, + "noUnusedLocals": true, + "preserveSymlinks": true, + "preserveWatchOutput": true, + "pretty": false, + "rootDir": "./src", + "strict": true, + "sourceMap": true, + "target": "es2022", + "module": "NodeNext" + }, + } + \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json deleted file mode 100644 index 42aace4..0000000 --- a/tsconfig.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "compilerOptions": { - "outDir": "./dist", - "allowJs": true, - "noImplicitAny": true, - "strictNullChecks": true, - "noImplicitThis": true, - "noEmitOnError": true, - "module": "ES2020", - "target": "ES2020", - "experimentalDecorators": true, - "emitDecoratorMetadata": true, - "types": ["reflect-metadata"], - "esModuleInterop": true, - "moduleResolution": "Node", - "resolveJsonModule": true, - "skipLibCheck": true, - }, - "ts-node": { - "esm": true, - "experimentalSpecifierResolution": "node" - }, - "include": ["./src/**/*"] -}
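Note on the new `tsconfig.base.json`: the root `tsconfig.json` is deleted and replaced by this shared base, so each workspace package is expected to provide its own `tsconfig.json` that extends it. The snippet below is a minimal sketch of what such a per-package config might look like; the `packages/dweb-api-server` path and the `outDir`/`include` values are illustrative assumptions and are not part of this diff.

```jsonc
// packages/dweb-api-server/tsconfig.json -- hypothetical example, not included in this patch
{
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    // Relative paths in an extended config resolve against the file that declares them,
    // so the package restates rootDir/outDir to point at its own source and build folders.
    "rootDir": "./src",
    "outDir": "./dist"
  },
  "include": ["./src/**/*"]
}
```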