From 38e91d40bd88f7e95aaf0b64ccb9ba728016c3a6 Mon Sep 17 00:00:00 2001
From: Alan Shaw
Date: Mon, 30 Sep 2024 12:56:20 +0100
Subject: [PATCH] test: add tests

---
 package-lock.json             |  33 +++++-
 psa/config.js                 |  59 ++++++++---
 psa/functions/download.js     |  11 +-
 psa/functions/hash.js         |   6 +-
 psa/lib.js                    | 191 ++++++++++++++++++++++++++--------
 psa/package.json              |   7 +-
 psa/test/helpers/bytes.js     |  14 +++
 psa/test/helpers/dag.js       |  39 +++++++
 psa/test/helpers/math.js      |   9 ++
 psa/test/helpers/resources.js |  35 +++++++
 psa/test/lib.spec.js          | 113 ++++++++++++++++++++
 stacks/psa-stack.js           |  18 +++-
 tsconfig.json                 |   3 +-
 13 files changed, 462 insertions(+), 76 deletions(-)
 create mode 100644 psa/test/helpers/bytes.js
 create mode 100644 psa/test/helpers/dag.js
 create mode 100644 psa/test/helpers/math.js
 create mode 100644 psa/test/helpers/resources.js
 create mode 100644 psa/test/lib.spec.js

diff --git a/package-lock.json b/package-lock.json
index 53ddd175..f351b47b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -7928,9 +7928,10 @@
       "dev": true
     },
     "node_modules/@ipld/car": {
-      "version": "5.3.0",
-      "resolved": "https://registry.npmjs.org/@ipld/car/-/car-5.3.0.tgz",
-      "integrity": "sha512-OB8LVvJeVAFFGluNIkZeDZ/aGeoekFKsuIvNT9I5sJIb5WekQuW5+lekjQ7Z7mZ7DBKuke/kI4jBT1j0/akU1w==",
+      "version": "5.3.2",
+      "resolved": "https://registry.npmjs.org/@ipld/car/-/car-5.3.2.tgz",
+      "integrity": "sha512-Bb4XrCFlnsCb9tTzZ1I8zo9O61D9qm7HfvuYrQ9gzdE8YhjyVIjrjmHmnoSWV/uCmyc2/bcqiDPIg+9WljXNzg==",
+      "license": "Apache-2.0 OR MIT",
       "dependencies": {
         "@ipld/dag-cbor": "^9.0.7",
         "cborg": "^4.0.5",
@@ -14628,6 +14629,7 @@
       "version": "2.2.0",
       "resolved": "https://registry.npmjs.org/carstream/-/carstream-2.2.0.tgz",
       "integrity": "sha512-/gHkK0lQjmGM45fhdx8JD+x7a1XS1qUk3T9xWWSt3oZiWPLq4u/lnDstp+N55K7hqTKKlb0CCr43EHTrlbmJSQ==",
+      "license": "Apache-2.0 OR MIT",
       "dependencies": {
         "@ipld/dag-cbor": "^9.0.3",
         "multiformats": "^13.0.1",
@@ -17132,6 +17134,7 @@
       "resolved": "https://registry.npmjs.org/entail/-/entail-2.1.2.tgz",
       "integrity": "sha512-/icW51VHeJo5j6z6/80vO6R7zAdvHDODHYcc2jItrhRvP/zfTlm2b+xcEkp/Vt3UI6R5651Stw0AGpE1Gzkm6Q==",
       "dev": true,
+      "license": "(Apache-2.0 AND MIT)",
      "dependencies": {
         "dequal": "^2.0.3",
         "globby": "13.1.4",
@@ -29404,6 +29407,11 @@
         "@aws-sdk/s3-request-presigner": "^3.658.1",
         "multiformats": "^13.3.0",
         "sst": "^2.43.7"
+      },
+      "devDependencies": {
+        "@ipld/car": "^5.3.2",
+        "entail": "^2.1.2",
+        "nanoid": "^5.0.7"
       }
     },
     "psa/node_modules/@aws-crypto/sha256-js": {
@@ -29839,6 +29847,25 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "psa/node_modules/nanoid": {
+      "version": "5.0.7",
+      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.0.7.tgz",
+      "integrity": "sha512-oLxFY2gd2IqnjcYyOXD8XGCftpGtZP2AbHbOkthDkvRywH5ayNtPVy9YlOPcHckXzbLTCHpkb7FB+yuxKV13pQ==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "github",
+          "url": "https://github.com/sponsors/ai"
+        }
+      ],
+      "license": "MIT",
+      "bin": {
+        "nanoid": "bin/nanoid.js"
+      },
+      "engines": {
+        "node": "^18 || >=20"
+      }
+    },
     "psa/node_modules/onetime": {
       "version": "5.1.2",
       "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
diff --git a/psa/config.js b/psa/config.js
index 96a322d5..5686dcb9 100644
--- a/psa/config.js
+++ b/psa/config.js
@@ -1,26 +1,53 @@
 import { base32 } from 'multiformats/bases/base32'
+import { S3Client } from '@aws-sdk/client-s3'
+import { createDudeWhereLocator, createHashEncodedInKeyHasher, createObjectHasher, createObjectLocator } from './lib.js'
+import { mustGetEnv } from '../lib/env.js'
 
-/** @type {import('../lib.js').Bucket[]} */
+/** @type {import('./lib.js').Bucket[]} */
 export const buckets = [
   {
-    name: process.env.S3_DOTSTORAGE_0_BUCKET_NAME,
-    region: process.env.S3_DOTSTORAGE_0_BUCKET_REGION,
-    toKey: root => {
-      const s = root.toV1().toString(base32)
-      return `complete/${s}/${s}.car`
-    }
+    locator: createObjectLocator(
+      new S3Client({ region: mustGetEnv('S3_DOTSTORAGE_0_BUCKET_REGION') }),
+      mustGetEnv('S3_DOTSTORAGE_0_BUCKET_NAME'),
+      root => {
+        const s = root.toV1().toString(base32)
+        return `complete/${s}/${s}.car`
+      }
+    ),
+    hasher: createObjectHasher()
   },
   {
-    name: process.env.S3_DOTSTORAGE_1_BUCKET_NAME,
-    region: process.env.S3_DOTSTORAGE_1_BUCKET_REGION,
-    toKey: root => {
-      const s = root.toV1().toString(base32)
-      return `complete/${s}/${s}.car`
-    }
+    locator: createObjectLocator(
+      new S3Client({ region: mustGetEnv('S3_DOTSTORAGE_1_BUCKET_REGION') }),
+      mustGetEnv('S3_DOTSTORAGE_1_BUCKET_NAME'),
+      root => {
+        const s = root.toV1().toString(base32)
+        return `complete/${s}/${s}.car`
+      }
+    ),
+    hasher: createObjectHasher()
   },
   {
-    name: process.env.S3_PICKUP_BUCKET_NAME,
-    region: process.env.S3_PICKUP_BUCKET_REGION,
-    toKey: r => `pickup/${r}/${r}.root.car`
+    locator: createObjectLocator(
+      new S3Client({ region: mustGetEnv('S3_PICKUP_BUCKET_REGION') }),
+      mustGetEnv('S3_PICKUP_BUCKET_NAME'),
+      r => `pickup/${r}/${r}.root.car`
+    ),
+    hasher: createObjectHasher()
+  },
+  {
+    locator: createDudeWhereLocator(
+      new S3Client({
+        endpoint: mustGetEnv('R2_ENDPOINT'),
+        credentials: {
+          accessKeyId: mustGetEnv('R2_ACCESS_KEY_ID'),
+          secretAccessKey: mustGetEnv('R2_SECRET_ACCESS_KEY'),
+        },
+        region: mustGetEnv('R2_REGION')
+      }),
+      mustGetEnv('R2_DUDEWHERE_BUCKET_NAME'),
+      mustGetEnv('R2_CARPARK_BUCKET_NAME')
+    ),
+    hasher: createHashEncodedInKeyHasher()
   }
 ]
diff --git a/psa/functions/download.js b/psa/functions/download.js
index 790b95e1..7f6c022a 100644
--- a/psa/functions/download.js
+++ b/psa/functions/download.js
@@ -1,22 +1,23 @@
 import { ApiHandler } from 'sst/node/api'
 import * as Link from 'multiformats/link'
 import { getDownloadURL, NotFound } from '../lib.js'
-import { errorResponse } from './lib/util.js'
+import * as Config from '../config.js'
+import { errorResponse, okResponse } from '../util.js'
 
 export const handler = ApiHandler(async event => {
   const { searchParams } = new URL(`http://localhost/?${event.rawQueryString}`)
 
   let root
   try {
-    root = Link.parse(searchParams.get('root'))
-  } catch (err) {
+    root = Link.parse(searchParams.get('root') ?? '')
+  } catch {
     return errorResponse('Invalid "root" search parameter', 400)
   }
 
   try {
-    const url = await getDownloadURL(buckets, root)
+    const url = await getDownloadURL(Config.buckets, root)
     return okResponse({ root, url })
-  } catch (err) {
+  } catch (/** @type {any} */ err) {
     return errorResponse(err.message, err instanceof NotFound ? 404 : 500)
   }
 })
diff --git a/psa/functions/hash.js b/psa/functions/hash.js
index 92756c73..db148a25 100644
--- a/psa/functions/hash.js
+++ b/psa/functions/hash.js
@@ -9,15 +9,15 @@ export const handler = ApiHandler(async event => {
 
   let root
   try {
-    root = Link.parse(searchParams.get('root'))
-  } catch (err) {
+    root = Link.parse(searchParams.get('root') ?? '')
+  } catch {
     return errorResponse('Invalid "root" search parameter', 400)
   }
 
   try {
     const { link, size } = await getHash(Config.buckets, root)
     return okResponse({ root, link, size })
-  } catch (err) {
+  } catch (/** @type {any} */ err) {
     return errorResponse(err.message, err instanceof NotFound ? 404 : 500)
   }
 })
diff --git a/psa/lib.js b/psa/lib.js
index b6fccdce..b66ea197 100644
--- a/psa/lib.js
+++ b/psa/lib.js
@@ -1,5 +1,5 @@
 import crypto from 'node:crypto'
-import { HeadObjectCommand, GetObjectCommand, S3Client } from '@aws-sdk/client-s3'
+import { HeadObjectCommand, GetObjectCommand, ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3'
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
 import * as Link from 'multiformats/link'
 import * as Digest from 'multiformats/hashes/digest'
@@ -7,29 +7,67 @@ import { sha256 } from 'multiformats/hashes/sha2'
 
 /**
  * @typedef {import('multiformats').UnknownLink} UnknownLink
- * @typedef {{ name: string, region: string, toKey: (root: UnknownLink) => string }} Bucket
- * @typedef {{ bucket: Bucket, key: string, size: number }} Location
+ * @typedef {{ locator: Locator, hasher: Hasher }} Bucket
+ * @typedef {{ root: UnknownLink, client: S3Client, bucket: string, key: string, size: number }} Location
+ * @typedef {{ locate: (root: UnknownLink) => Promise<Location|undefined> }} Locator
+ * @typedef {{ digest: (location: Location) => Promise<import('multiformats').Link> }} Hasher
  */
 
+const CAR_CODEC = 0x0202
+
 /**
+ * Get the hash of a CAR file stored in one of the passed buckets that contains
+ * the complete DAG for the given root CID.
+ *
  * @param {Bucket[]} buckets
  * @param {UnknownLink} root
- * @returns {Promise<Location|undefined>}
+ * @throws {NotFound}
  */
-export const locateCAR = async (buckets, root) => {
+export const getHash = async (buckets, root) => {
   for (const bucket of buckets) {
-    const key = bucket.toKey(root)
-    const client = new S3Client({ region: bucket.region })
-    const cmd = new HeadObjectCommand({
-      Bucket: bucket.name,
-      Key: key
-    })
+    const location = await bucket.locator.locate(root)
+    if (!location) continue
+
+    const link = await bucket.hasher.digest(location)
+    return { link, size: location.size }
+  }
+  throw new NotFound(`not found: ${root}`)
+}
+
+/**
+ * Create a locator that can find a key in any S3 compatible bucket.
+ *
+ * @param {S3Client} client
+ * @param {string} bucketName
+ * @param {(root: UnknownLink) => string} encodeKey
+ * @returns {Locator}
+ */
+export const createObjectLocator = (client, bucketName, encodeKey) =>
+  new S3ObjectLocator(client, bucketName, encodeKey)
+
+/** @implements {Locator} */
+class S3ObjectLocator {
+  /**
+   * @param {S3Client} client
+   * @param {string} bucketName
+   * @param {(root: UnknownLink) => string} encodeKey
+   */
+  constructor (client, bucketName, encodeKey) {
+    this.client = client
+    this.bucketName = bucketName
+    this.encodeKey = encodeKey
+  }
+
+  /** @param {UnknownLink} root */
+  async locate (root) {
+    const key = this.encodeKey(root)
+    const cmd = new HeadObjectCommand({ Bucket: this.bucketName, Key: key })
     try {
-      const res = await client.send(cmd)
+      const res = await this.client.send(cmd)
       const size = res.ContentLength
       if (size == null) throw new Error(`missing ContentLength: ${root}`)
-      return { bucket, key, size }
-    } catch (err) {
+      return { root, client: this.client, bucket: this.bucketName, key, size }
+    } catch (/** @type {any} */ err) {
       if (err?.$metadata.httpStatusCode !== 404) {
         throw err
       }
@@ -38,33 +76,102 @@
 }
 
 /**
- * Get the hash of a CAR file stored in one of the passed buckets that contains
- * the complete DAG for the given root CID.
- *
- * @param {Bucket[]} buckets
- * @param {UnknownLink} root
- * @throws {NotFound}
+ * Creates a client that knows how to locate an object by looking in the legacy
+ * DUDEWHERE index bucket to find the key.
+ *
+ * @param {S3Client} client
+ * @param {string} indexBucketName Name of the DUDEWHERE bucket.
+ * @param {string} dataBucketName Name of the CARPARK bucket.
  */
-export const getHash = async (buckets, root) => {
-  const location = await locateCAR(buckets, root)
-  if (!location) {
-    throw new NotFound(`Not found: ${root}`)
+export const createDudeWhereLocator = (client, indexBucketName, dataBucketName) =>
+  new DudeWhereLocator(client, indexBucketName, dataBucketName)
+
+/** @implements {Locator} */
+class DudeWhereLocator {
+  /**
+   * @param {S3Client} client
+   * @param {string} indexBucketName Name of the DUDEWHERE bucket.
+   * @param {string} dataBucketName Name of the CARPARK bucket.
+   */
+  constructor (client, indexBucketName, dataBucketName) {
+    this.client = client
+    this.indexBucketName = indexBucketName
+    this.dataBucketName = dataBucketName
   }
 
-  const s3 = new S3Client({ region })
-  const cmd = new GetObjectCommand({ Bucket: location.bucket.name, Key: location.key })
+  /** @param {UnknownLink} root */
+  async locate (root) {
+    const cmd = new ListObjectsV2Command({
+      Bucket: this.indexBucketName,
+      MaxKeys: 2,
+      Prefix: `${root}/`
+    })
+    const res = await this.client.send(cmd)
+    const contents = res.Contents
 
-  const res = await s3.send(cmd)
-  if (!res.Body) {
-    throw new NotFound(`Object not found: ${root}`)
+    // if there are no items then it's simply not found
+    if (!contents?.length) return
+    // if there's more than one item, then someone else has stored this root,
+    // as multiple shards, or with a different block ordering. There's no way
+    // to know which subset of shards contains the entire DAG.
+    if (contents.length > 1) return
+    // if there's no key then this is a weird situation
+    if (!contents[0].Key) return
+
+    const key = contents[0].Key
+    const locator = createObjectLocator(this.client, this.dataBucketName, () => {
+      const link = Link.parse(key.split('/').pop() ?? '')
+      return `${link}/${link}.car`
+    })
+    return locator.locate(root)
   }
+}
 
-  const hash = crypto.createHash('sha256')
-  await res.Body.transformToWebStream()
-    .pipeTo(new WritableStream({ write: chunk => { hash.update(chunk) } }))
+/**
+ * A hasher that reads data from a location and hashes it.
+ * @returns {Hasher}
+ */
+export const createObjectHasher = () => new ObjectHasher()
 
-  const digest = Digest.create(sha256.code, hash.digest())
-  return { link: Link.create(CAR_CODEC, digest), size: location.size }
+/** @implements {Hasher} */
+class ObjectHasher {
+  /** @param {Location} location */
+  async digest (location) {
+    const cmd = new GetObjectCommand({ Bucket: location.bucket, Key: location.key })
+
+    const res = await location.client.send(cmd)
+    if (!res.Body) {
+      throw new NotFound(`Object not found: ${location.root}`) // shouldn't happen
+    }
+
+    const hash = crypto.createHash('sha256')
+    await res.Body.transformToWebStream()
+      .pipeTo(new WritableStream({ write: chunk => { hash.update(chunk) } }))
+
+    const digest = Digest.create(sha256.code, hash.digest())
+    return Link.create(CAR_CODEC, digest)
+  }
+}
+
+/**
+ * A hasher that extracts the CAR hash from the key.
+ * @returns {Hasher}
+ */
+export const createHashEncodedInKeyHasher = () => new HashEncodedInKeyHasher()
+
+/** @implements {Hasher} */
+class HashEncodedInKeyHasher {
+  /** @param {Location} location */
+  async digest (location) {
+    const filename = location.key.split('/').pop()
+    if (!filename || !filename.endsWith('.car')) {
+      throw new Error('unexpected key format')
+    }
+    const hash =
+      /** @type {import('multiformats').Link} */
+      (Link.parse(filename.replace('.car', '')))
+    return hash
+  }
 }
 
 export const DownloadURLExpiration = 1000 * 60 * 60 * 24 // 1 day in seconds
 
@@ -78,15 +185,15 @@
  * @throws {NotFound}
  */
 export const getDownloadURL = async (buckets, root) => {
-  const location = await locateCAR(buckets, root)
-  if (!location) {
-    throw new NotFound(`Not found: ${root}`)
-  }
+  for (const bucket of buckets) {
+    const location = await bucket.locator.locate(root)
+    if (!location) continue
 
-  const s3 = new S3Client({ region: location.bucket.region })
-  const cmd = new GetObjectCommand({ Bucket: location.bucket.name, Key: location.key })
-  const url = await getSignedUrl(s3, cmd, { expiresIn: DownloadURLExpiration })
-  return new URL(url)
+    const cmd = new GetObjectCommand({ Bucket: location.bucket, Key: location.key })
+    const url = await getSignedUrl(location.client, cmd, { expiresIn: DownloadURLExpiration })
+    return new URL(url)
+  }
+  throw new NotFound(`not found: ${root}`)
 }
 
 export class NotFound extends Error {}
diff --git a/psa/package.json b/psa/package.json
index f40381d8..58db4a24 100644
--- a/psa/package.json
+++ b/psa/package.json
@@ -3,12 +3,17 @@
   "version": "0.0.0",
   "type": "module",
   "scripts": {
-    "test": "echo 'TODO'"
+    "test": "entail test/*.spec.js"
   },
   "dependencies": {
     "@aws-sdk/client-s3": "^3.658.1",
     "@aws-sdk/s3-request-presigner": "^3.658.1",
     "multiformats": "^13.3.0",
     "sst": "^2.43.7"
+  },
+  "devDependencies": {
+    "@ipld/car": "^5.3.2",
+    "entail": "^2.1.2",
+    "nanoid": "^5.0.7"
   }
 }
diff --git a/psa/test/helpers/bytes.js b/psa/test/helpers/bytes.js
new file mode 100644
index 00000000..8280b9bd
--- /dev/null
+++ b/psa/test/helpers/bytes.js
@@ -0,0 +1,14 @@
+import { webcrypto } from 'node:crypto'
+
+/** @param {number} size */
+export const randomBytes = size => {
+  const bytes = new Uint8Array(size)
+  while (size) {
+    const chunk = new Uint8Array(Math.min(size, 65_536))
+    webcrypto.getRandomValues(chunk)
+
+    size -= chunk.length // consume only the bytes filled this iteration
+    bytes.set(chunk, size)
+  }
+  return bytes
+}
diff --git a/psa/test/helpers/dag.js b/psa/test/helpers/dag.js
new file mode 100644
index 00000000..babd0459
--- /dev/null
+++ b/psa/test/helpers/dag.js
@@ -0,0 +1,39 @@
+import { sha256 } from 'multiformats/hashes/sha2'
+import * as raw from 'multiformats/codecs/raw'
+import * as Link from 'multiformats/link'
+import { CarBufferWriter } from '@ipld/car'
+import { randomInteger } from './math.js'
+import { randomBytes } from './bytes.js'
+
+export const randomBlock = () => {
+  const bytes = randomBytes(randomInteger(1, 1024 * 1024))
+  const mh = sha256.digest(bytes)
+  if (mh instanceof Promise) throw new Error('sha256 hasher is async')
+  return { cid: Link.create(raw.code, mh), bytes }
+}
+
+/**
+ * @param {import('multiformats').UnknownLink} root
+ * @param {import('multiformats').Block[]} blocks
+ */
+export const encodeCAR = (root, blocks) => {
+  const roots = [root]
+  // @ts-expect-error
+  const headerSize = CarBufferWriter.headerLength({ roots })
+  let blocksSize = 0
+  for (const b of blocks) {
+    // @ts-expect-error
+    blocksSize += CarBufferWriter.blockLength(b)
+  }
+  // @ts-expect-error
+  const writer = CarBufferWriter.createWriter(new Uint8Array(headerSize + blocksSize), { roots })
+
+  for (const b of blocks) {
+    // @ts-expect-error
+    writer.write(b)
+  }
+  const bytes = writer.close()
+  const mh = sha256.digest(bytes)
+  if (mh instanceof Promise) throw new Error('sha256 hasher is async')
+  return { cid: Link.create(0x0202, mh), bytes }
+}
diff --git a/psa/test/helpers/math.js b/psa/test/helpers/math.js
new file mode 100644
index 00000000..2ee4eddc
--- /dev/null
+++ b/psa/test/helpers/math.js
@@ -0,0 +1,9 @@
+/**
+ * @param {number} min
+ * @param {number} max
+ */
+export const randomInteger = (min, max) => {
+  min = Math.ceil(min)
+  max = Math.floor(max)
+  return Math.floor(Math.random() * (max - min) + min)
+}
diff --git a/psa/test/helpers/resources.js b/psa/test/helpers/resources.js
new file mode 100644
index 00000000..c217a5c9
--- /dev/null
+++ b/psa/test/helpers/resources.js
@@ -0,0 +1,35 @@
+import { GenericContainer as Container } from 'testcontainers'
+import { CreateBucketCommand, S3Client } from '@aws-sdk/client-s3'
+import { customAlphabet } from 'nanoid'
+
+const id = customAlphabet('1234567890abcdefghijklmnopqrstuvwxyz', 10)
+
+/** @param {{ region?: string }} [opts] */
+export const createS3 = async opts => {
+  console.log('Creating local S3...')
+  const port = 9000
+  const region = opts?.region ?? 'us-west-2'
+  const container = await new Container('quay.io/minio/minio')
+    .withCommand(['server', '/data'])
+    .withExposedPorts(port)
+    .start()
+  const endpoint = `http://${container.getHost()}:${container.getMappedPort(port)}`
+  const clientOpts = {
+    endpoint,
+    forcePathStyle: true,
+    region,
+    credentials: {
+      accessKeyId: 'minioadmin',
+      secretAccessKey: 'minioadmin',
+    }
+  }
+  return { client: new S3Client(clientOpts), endpoint }
+}
+
+/** @param {S3Client} s3 */
+export async function createBucket(s3) {
+  const name = id()
+  console.log(`Creating S3 bucket "${name}"...`)
+  await s3.send(new CreateBucketCommand({ Bucket: name }))
+  return name
+}
diff --git a/psa/test/lib.spec.js b/psa/test/lib.spec.js
new file mode 100644
index 00000000..b221a6ac
--- /dev/null
+++ b/psa/test/lib.spec.js
@@ -0,0 +1,113 @@
+import { PutObjectCommand } from '@aws-sdk/client-s3'
+import { createDudeWhereLocator, createHashEncodedInKeyHasher, createObjectHasher, createObjectLocator } from '../lib.js'
+import { encodeCAR, randomBlock } from './helpers/dag.js'
+import { createBucket, createS3 } from './helpers/resources.js'
+
+const s3 = await createS3()
+
+export const testObjectLocator = {
+  'should find object': async (/** @type {import('entail').assert} */ assert) => {
+    const bucket = await createBucket(s3.client)
+    const block = randomBlock()
+    const car = encodeCAR(block.cid, [block])
+    const key = `complete/${block.cid}.car`
+
+    await s3.client.send(new PutObjectCommand({
+      Bucket: bucket,
+      Key: key,
+      Body: car.bytes
+    }))
+
+    const locator = createObjectLocator(s3.client, bucket, r => `complete/${r}.car`)
+    const location = await locator.locate(block.cid)
+
+    assert.ok(location)
+    assert.equal(location.bucket, bucket)
+    assert.equal(location.key, key)
+    assert.equal(location.size, car.bytes.length)
+  }
+}
+
+export const testDudeWhereLocator = {
+  'should find object': async (/** @type {import('entail').assert} */ assert) => {
+    const [indexBucket, dataBucket] = await Promise.all([
+      createBucket(s3.client),
+      createBucket(s3.client)
+    ])
+    const block = randomBlock()
+    const car = encodeCAR(block.cid, [block])
+    const indexKey = `${block.cid}/${car.cid}`
+    const dataKey = `${car.cid}/${car.cid}.car`
+
+    await Promise.all([
+      s3.client.send(new PutObjectCommand({
+        Bucket: indexBucket,
+        Key: indexKey,
+        Body: new Uint8Array()
+      })),
+      s3.client.send(new PutObjectCommand({
+        Bucket: dataBucket,
+        Key: dataKey,
+        Body: car.bytes
+      }))
+    ])
+
+    const locator = createDudeWhereLocator(s3.client, indexBucket, dataBucket)
+    const location = await locator.locate(block.cid)
+
+    assert.ok(location)
+    assert.equal(location.bucket, dataBucket)
+    assert.equal(location.key, dataKey)
+    assert.equal(location.size, car.bytes.length)
+  }
+}
+
+export const testObjectHasher = {
+  'should hash object': async (/** @type {import('entail').assert} */ assert) => {
+    const bucket = await createBucket(s3.client)
+    const block = randomBlock()
+    const car = encodeCAR(block.cid, [block])
+    const key = `complete/${block.cid}.car`
+
+    await s3.client.send(new PutObjectCommand({
+      Bucket: bucket,
+      Key: key,
+      Body: car.bytes
+    }))
+
+    const hasher = createObjectHasher()
+    const link = await hasher.digest({
+      client: s3.client,
+      root: block.cid,
+      bucket,
+      key,
+      size: car.bytes.length
+    })
+    assert.equal(link.toString(), car.cid.toString())
+  }
+}
+
+export const testHashEncodedInKeyHasher = {
+  'should hash object': async (/** @type {import('entail').assert} */ assert) => {
+    const bucket = await createBucket(s3.client)
+    const block = randomBlock()
+    const car = encodeCAR(block.cid, [block])
+    const key = `${car.cid}/${car.cid}.car`
+
+    await s3.client.send(new PutObjectCommand({
+      Bucket: bucket,
+      Key: key,
+      Body: new Uint8Array() // purposely incorrect to ensure hash is coming from key
+    }))
+
+    const hasher = createHashEncodedInKeyHasher()
+    const link = await hasher.digest({
+      client: s3.client,
+      root: block.cid,
+      bucket,
+      key,
+      size: car.bytes.length
+    })
+    assert.equal(link.toString(), car.cid.toString())
+  }
+}
diff --git a/stacks/psa-stack.js b/stacks/psa-stack.js
index 5a0b91ef..015aaed4 100644
--- a/stacks/psa-stack.js
+++ b/stacks/psa-stack.js
@@ -8,24 +8,32 @@ import { Function } from 'sst/constructs'
 export function PSAStack ({ stack }) {
   stack.setDefaultFunctionProps({
     runtime: 'nodejs20.x',
-    architecture: 'arm_64'
+    architecture: 'arm_64',
+    environment: {
+      R2_ENDPOINT: process.env.R2_ENDPOINT ?? '',
+      R2_REGION: process.env.R2_REGION ?? '',
+      R2_CARPARK_BUCKET_NAME: process.env.R2_CARPARK_BUCKET_NAME ?? '',
+      R2_DUDEWHERE_BUCKET_NAME: process.env.R2_DUDEWHERE_BUCKET_NAME ?? '',
+      R2_ACCESS_KEY_ID: process.env.R2_ACCESS_KEY_ID ?? '',
+      R2_SECRET_ACCESS_KEY: process.env.R2_SECRET_ACCESS_KEY ?? ''
+    }
   })
 
   const hashFunction = new Function(stack, 'hash', {
-    handler: 'shardutil/functions/hash.handler',
+    handler: 'psa/functions/hash.handler',
     url: { cors: true, authorizer: 'none' },
     memorySize: '4 GB',
     timeout: '15 minutes'
   })
 
-  hashFunction.attachPermissions(['s3:GetObject'])
+  hashFunction.attachPermissions(['s3:HeadObject', 's3:GetObject'])
 
   const downloadFunction = new Function(stack, 'download', {
-    handler: 'shardutil/functions/download.handler',
+    handler: 'psa/functions/download.handler',
     url: { cors: true, authorizer: 'none' }
   })
 
-  downloadFunction.attachPermissions(['s3:GetObject'])
+  downloadFunction.attachPermissions(['s3:HeadObject', 's3:GetObject'])
 
   stack.addOutputs({
     hashFunctionURL: hashFunction.url,
diff --git a/tsconfig.json b/tsconfig.json
index fe4a6e27..82d01aa1 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -45,7 +45,8 @@
     "ucan-invocation",
     "filecoin",
     "tools",
-    "lib"
+    "lib",
+    "psa"
   ],
   "exclude": ["billing/coverage", "upload-api/dist"]
 }
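
Usage sketch: the pieces wired up by this change compose as follows — psa/config.js exports a list of { locator, hasher } buckets, and psa/lib.js resolves a root CID against that list via getHash and getDownloadURL. A minimal sketch of a caller, assuming repo-root-relative import paths and an arbitrary example root CID (both illustrative, not part of the patch):

import * as Link from 'multiformats/link'
import { getHash, getDownloadURL, NotFound } from './psa/lib.js'
import { buckets } from './psa/config.js'

// example root CID (illustrative only)
const root = Link.parse('bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi')

try {
  // CAR link (sha2-256 digest, CAR codec 0x0202) and byte size of the located object
  const { link, size } = await getHash(buckets, root)
  // pre-signed GET URL for downloading the located CAR
  const url = await getDownloadURL(buckets, root)
  console.log(link.toString(), size, url.href)
} catch (err) {
  if (err instanceof NotFound) console.error(`no complete CAR found for ${root}`)
  else throw err
}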