33 changes: 33 additions & 0 deletions packages/verified-fetch/README.md
@@ -347,6 +347,39 @@ if (res.headers.get('Content-Type') === 'application/json') {
console.info(obj) // ...
```

## The `Accept` header

The `Accept` header can be passed to override certain response processing, or to ensure that the final `Content-Type` of the response is the one that is expected.

If the final `Content-Type` of the response does not match the `Accept` header, if the content cannot be represented in the format the `Accept` header dictates, or if a configured custom content type parser returns a value that is not in the `Accept` header, a [406: Not Acceptable](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/406) response will be returned:

```typescript
import { verifiedFetch } from '@helia/verified-fetch'

const res = await verifiedFetch('ipfs://bafyJPEGImageCID', {
headers: {
accept: 'image/png'
}
})

console.info(res.status) // 406 - the image was a JPEG but we specified PNG as the accept header
```

The `Accept` header can also be used to skip processing the data for some formats, such as `DAG-CBOR`, if you wish to handle decoding it yourself:

```typescript
import { verifiedFetch } from '@helia/verified-fetch'

const res = await verifiedFetch('ipfs://bafyDAGCBORCID', {
headers: {
accept: 'application/octet-stream'
}
})

console.info(res.headers.get('content-type')) // application/octet-stream
const buf = await res.arrayBuffer() // raw DAG-CBOR bytes, not decoded into JSON
```
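
If you opt out of the built-in processing, decoding the bytes becomes your responsibility. As a minimal sketch (reusing the placeholder CID from above), the raw bytes can be decoded with `@ipld/dag-cbor`:

```typescript
import { decode } from '@ipld/dag-cbor'
import { verifiedFetch } from '@helia/verified-fetch'

// 'bafyDAGCBORCID' is a placeholder for a real DAG-CBOR CID
const res = await verifiedFetch('ipfs://bafyDAGCBORCID', {
  headers: {
    accept: 'application/octet-stream'
  }
})

// decode the unprocessed DAG-CBOR bytes into a JavaScript object
const obj = decode(new Uint8Array(await res.arrayBuffer()))
console.info(obj)
```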

## Comparison to fetch

This module attempts to act as similarly to the `fetch()` API as possible.
20 changes: 17 additions & 3 deletions packages/verified-fetch/package.json
@@ -142,21 +142,31 @@
},
"dependencies": {
"@helia/block-brokers": "^2.0.1",
"@helia/car": "^3.0.0",
"@helia/http": "^1.0.1",
"@helia/interface": "^4.0.0",
"@helia/ipns": "^6.0.0",
"@helia/routers": "^1.0.0",
"@helia/unixfs": "^3.0.0",
"@ipld/car": "^5.2.6",
"@ipld/dag-cbor": "^9.2.0",
"@ipld/dag-json": "^10.2.0",
"@ipld/dag-pb": "^4.1.0",
"@libp2p/interface": "^1.1.2",
"@libp2p/kad-dht": "^12.0.7",
**Member (Author):** This is only here to be able to deserialize stored IPNS records that are wrapped in a libp2p record.

Longer term we may wish to split the libp2p record code out of the dht code to make the bundle size a bit smaller.

**Member:** Do we have a tracking issue for this?

"@libp2p/peer-id": "^4.0.5",
"cborg": "^4.0.9",
"hashlru": "^2.3.0",
"interface-blockstore": "^5.2.10",
"interface-datastore": "^8.2.11",
"ipfs-unixfs-exporter": "^13.5.0",
"it-map": "^3.0.5",
"it-pipe": "^3.0.1",
"it-tar": "^6.0.4",
"it-to-browser-readablestream": "^2.0.6",
"multiformats": "^13.1.0",
"progress-events": "^1.0.0"
"progress-events": "^1.0.0",
"uint8arrays": "^5.0.2"
},
"devDependencies": {
"@helia/dag-cbor": "^3.0.0",
@@ -169,14 +179,18 @@
"@types/sinon": "^17.0.3",
"aegir": "^42.2.2",
"blockstore-core": "^4.4.0",
"browser-readablestream-to-it": "^2.0.5",
"datastore-core": "^9.2.8",
"helia": "^4.0.1",
"ipfs-unixfs-importer": "^15.2.4",
"ipns": "^9.0.0",
"it-all": "^3.0.4",
"it-last": "^3.0.4",
"it-to-buffer": "^4.0.5",
"magic-bytes.js": "^1.8.0",
"p-defer": "^4.0.0",
"sinon": "^17.0.1",
"sinon-ts": "^2.0.0",
"uint8arrays": "^5.0.1"
"sinon-ts": "^2.0.0"
},
"sideEffects": false
}
37 changes: 37 additions & 0 deletions packages/verified-fetch/src/index.ts
@@ -320,6 +320,39 @@
* console.info(obj) // ...
* ```
*
* ## The `Accept` header
*
* The `Accept` header can be passed to override certain response processing, or to ensure that the final `Content-Type` of the response is the one that is expected.
*
* If the final `Content-Type` of the response does not match the `Accept` header, if the content cannot be represented in the format the `Accept` header dictates, or if a configured custom content type parser returns a value that is not in the `Accept` header, a [406: Not Acceptable](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/406) response will be returned:
*
* ```typescript
* import { verifiedFetch } from '@helia/verified-fetch'
*
* const res = await verifiedFetch('ipfs://bafyJPEGImageCID', {
* headers: {
* accept: 'image/png'
* }
* })
*
* console.info(res.status) // 406 - the image was a JPEG but we specified PNG as the accept header
* ```
*
* The `Accept` header can also be used to skip processing the data for some formats, such as `DAG-CBOR`, if you wish to handle decoding it yourself:
*
* ```typescript
* import { verifiedFetch } from '@helia/verified-fetch'
*
* const res = await verifiedFetch('ipfs://bafyDAGCBORCID', {
* headers: {
* accept: 'application/octet-stream'
* }
* })
*
* console.info(res.headers.get('content-type')) // application/octet-stream
* const buf = await res.arrayBuffer() // raw DAG-CBOR bytes, not decoded into JSON
* ```
*
* ## Comparison to fetch
*
* This module attempts to act as similarly to the `fetch()` API as possible.
@@ -449,6 +482,10 @@ import type { ProgressEvent, ProgressOptions } from 'progress-events'
*/
export type Resource = string | CID

export interface ResourceDetail {
resource: Resource
}

export interface CIDDetail {
cid: CID
path: string
18 changes: 18 additions & 0 deletions packages/verified-fetch/src/utils/get-content-disposition-filename.ts
@@ -0,0 +1,18 @@
/**
* Takes a filename URL param and returns a string for use in a
* `Content-Disposition` header
*/
export function getContentDispositionFilename (filename: string): string {
const asciiOnly = replaceNonAsciiCharacters(filename)

if (asciiOnly === filename) {
return `filename="${filename}"`
}

return `filename="${asciiOnly}"; filename*=UTF-8''${encodeURIComponent(filename)}`
}

function replaceNonAsciiCharacters (filename: string): string {
// eslint-disable-next-line no-control-regex
return filename.replace(/[^\x00-\x7F]/g, '_')
}
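
For illustration, the two output forms look like this (a standalone usage sketch, not part of the diff; the import path assumes the file above is `get-content-disposition-filename.ts`):

```typescript
import { getContentDispositionFilename } from './get-content-disposition-filename.js'

// ASCII-only filenames pass through unchanged
console.info(getContentDispositionFilename('photo.jpg'))
// filename="photo.jpg"

// non-ASCII characters become '_' in the plain parameter, while the original
// name survives in the RFC 5987 `filename*` form for clients that support it
console.info(getContentDispositionFilename('日記.txt'))
// filename="__.txt"; filename*=UTF-8''%E6%97%A5%E8%A8%98.txt
```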
68 changes: 68 additions & 0 deletions packages/verified-fetch/src/utils/get-tar-stream.ts
@@ -0,0 +1,68 @@
import { CodeError } from '@libp2p/interface'
import { exporter, recursive, type UnixFSEntry } from 'ipfs-unixfs-exporter'
import map from 'it-map'
import { pipe } from 'it-pipe'
import { pack, type TarEntryHeader, type TarImportCandidate } from 'it-tar'
import type { AbortOptions } from '@libp2p/interface'
import type { Blockstore } from 'interface-blockstore'

const EXPORTABLE = ['file', 'raw', 'directory']

function toHeader (file: UnixFSEntry): Partial<TarEntryHeader> & { name: string } {
let mode: number | undefined
let mtime: Date | undefined

if (file.type === 'file' || file.type === 'directory') {
mode = file.unixfs.mode
mtime = file.unixfs.mtime != null ? new Date(Number(file.unixfs.mtime.secs * 1000n)) : undefined
}

return {
name: file.path,
mode,
mtime,
size: Number(file.size),
type: file.type === 'directory' ? 'directory' : 'file'
}
}

function toTarImportCandidate (entry: UnixFSEntry): TarImportCandidate {
if (!EXPORTABLE.includes(entry.type)) {
throw new CodeError('Not a UnixFS node', 'ERR_NOT_UNIXFS')
}

const candidate: TarImportCandidate = {
header: toHeader(entry)
}

if (entry.type === 'file' || entry.type === 'raw') {
candidate.body = entry.content()
}

return candidate
}

export async function * tarStream (ipfsPath: string, blockstore: Blockstore, options?: AbortOptions): AsyncGenerator<Uint8Array> {
const file = await exporter(ipfsPath, blockstore, options)

if (file.type === 'file' || file.type === 'raw') {
yield * pipe(
[toTarImportCandidate(file)],
pack()
)

return
}

if (file.type === 'directory') {
yield * pipe(
recursive(ipfsPath, blockstore, options),
(source) => map(source, (entry) => toTarImportCandidate(entry)),
pack()
)

return
}

throw new CodeError('Not a UnixFS node', 'ERR_NOT_UNIXFS')
}
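
A minimal consumption sketch (assuming a blockstore that already contains the exported DAG; `bafyFooCID` is a placeholder CID): `tarStream` yields the archive as `Uint8Array` chunks, collected here with `it-to-buffer` from this package's dev dependencies.

```typescript
import { MemoryBlockstore } from 'blockstore-core'
import toBuffer from 'it-to-buffer'
import { tarStream } from './get-tar-stream.js'

const blockstore = new MemoryBlockstore()
// ...import a file or directory into the blockstore first...

// collect the yielded chunks into a single buffer holding a complete .tar archive
const tarBytes = await toBuffer(tarStream('/ipfs/bafyFooCID', blockstore))
console.info(`${tarBytes.byteLength} bytes of tar data`)
```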
12 changes: 11 additions & 1 deletion packages/verified-fetch/src/utils/parse-url-string.ts
@@ -19,6 +19,8 @@ export interface ParseUrlStringOptions extends ProgressOptions<ResolveProgressEvents> {

export interface ParsedUrlQuery extends Record<string, string | unknown> {
format?: RequestFormatShorthand
download?: boolean
filename?: string
}

export interface ParsedUrlStringResults {
@@ -109,14 +111,22 @@ export async function parseUrlString ({ urlString, ipns, logger }: ParseUrlStrin
}

// parse query string
const query: Record<string, string> = {}
const query: Record<string, any> = {}

if (queryString != null && queryString.length > 0) {
const queryParts = queryString.split('&')
for (const part of queryParts) {
const [key, value] = part.split('=')
query[key] = decodeURIComponent(value)
}

if (query.download != null) {
// the query string carries 'download' as text - coerce it to a boolean
query.download = query.download === 'true'
}

if (query.filename != null) {
// ensure 'filename' is a plain string
query.filename = query.filename.toString()
}
}

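For illustration, here is the query-string handling above applied to a sample value (a standalone sketch mirroring the parsing logic, not part of the diff):

```typescript
// mirrors the query parsing in parseUrlString above
const queryString = 'format=tar&download=true&filename=my%20archive.tar'
const query: Record<string, any> = {}

for (const part of queryString.split('&')) {
  const [key, value] = part.split('=')
  query[key] = decodeURIComponent(value)
}

if (query.download != null) {
  query.download = query.download === 'true'
}

console.info(query)
// { format: 'tar', download: true, filename: 'my archive.tar' }
```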