feat: add redundancy options and utils (#896)
* feat: add redundancy options and utils

* chore: lowercase header
Cafe137 authored Mar 27, 2024
1 parent 1408ca9 commit 29899ed
Showing 8 changed files with 292 additions and 28 deletions.
11 changes: 6 additions & 5 deletions src/bee.ts
@@ -40,6 +40,7 @@ import type {
Tag,
Topic,
UploadOptions,
UploadRedundancyOptions,
UploadResultWithCid,
} from './types'
import {
@@ -143,7 +144,7 @@ export class Bee {
async uploadData(
postageBatchId: string | BatchId,
data: string | Uint8Array,
options?: UploadOptions,
options?: UploadOptions & UploadRedundancyOptions,
requestOptions?: BeeRequestOptions,
): Promise<UploadResult> {
assertBatchId(postageBatchId)
@@ -264,7 +265,7 @@ export class Bee {
postageBatchId: string | BatchId,
data: string | Uint8Array | Readable | File,
name?: string,
options?: FileUploadOptions,
options?: FileUploadOptions & UploadRedundancyOptions,
requestOptions?: BeeRequestOptions,
): Promise<UploadResultWithCid> {
assertBatchId(postageBatchId)
@@ -377,7 +378,7 @@ export class Bee {
async uploadFiles(
postageBatchId: string | BatchId,
fileList: FileList | File[],
options?: CollectionUploadOptions,
options?: CollectionUploadOptions & UploadRedundancyOptions,
requestOptions?: BeeRequestOptions,
): Promise<UploadResultWithCid> {
assertBatchId(postageBatchId)
@@ -405,7 +406,7 @@ export class Bee {
async uploadCollection(
postageBatchId: string | BatchId,
collection: Collection<Uint8Array | Readable>,
options?: CollectionUploadOptions,
options?: CollectionUploadOptions & UploadRedundancyOptions,
): Promise<UploadResultWithCid> {
assertBatchId(postageBatchId)
assertCollection(collection)
@@ -437,7 +438,7 @@ export class Bee {
async uploadFilesFromDirectory(
postageBatchId: string | BatchId,
dir: string,
options?: CollectionUploadOptions,
options?: CollectionUploadOptions & UploadRedundancyOptions,
requestOptions?: BeeRequestOptions,
): Promise<UploadResultWithCid> {
assertBatchId(postageBatchId)
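To see how the widened signatures are meant to be used, here is a minimal upload sketch against the updated Bee class. It assumes the published package name @ethersphere/bee-js, that RedundancyLevel is re-exported from the package root, and placeholder values for the node URL and postage batch ID:

```ts
import { Bee, RedundancyLevel } from '@ethersphere/bee-js'

const bee = new Bee('http://localhost:1633')
// Placeholder batch ID; use a postage batch purchased on your own node.
const postageBatchId = '0'.repeat(64)

async function main(): Promise<void> {
  // Ask the node to erasure-code the upload so downloads tolerate missing chunks.
  const { reference } = await bee.uploadData(postageBatchId, 'hello redundancy', {
    redundancyLevel: RedundancyLevel.MEDIUM,
  })

  console.log(`swarm reference: ${reference}`)
}

main()
```

The same UploadRedundancyOptions intersection applies to uploadFile, uploadFiles, uploadCollection and uploadFilesFromDirectory, as the signatures above show.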
2 changes: 1 addition & 1 deletion src/index.ts
@@ -7,7 +7,7 @@ export * from './utils/error'
export * as Utils from './utils/expose'
export { Bee, BeeDebug }

// for requrie-like imports
// for require-like imports
declare global {
interface Window {
// binded as 'BeeJs' via Webpack
26 changes: 21 additions & 5 deletions src/modules/bytes.ts
@@ -1,7 +1,16 @@
import type { BatchId, BeeRequestOptions, Data, Reference, ReferenceOrEns, UploadOptions } from '../types'
import type {
BatchId,
BeeRequestOptions,
Data,
DownloadRedundancyOptions,
Reference,
ReferenceOrEns,
UploadOptions,
UploadRedundancyOptions,
} from '../types'
import { UploadResult } from '../types'
import { wrapBytesWithHelpers } from '../utils/bytes'
import { extractUploadHeaders } from '../utils/headers'
import { extractDownloadHeaders, extractRedundantUploadHeaders } from '../utils/headers'
import { http } from '../utils/http'
import { makeTagUid } from '../utils/type'

@@ -19,7 +28,7 @@ export async function upload(
requestOptions: BeeRequestOptions,
data: string | Uint8Array,
postageBatchId: BatchId,
options?: UploadOptions,
options?: UploadOptions & UploadRedundancyOptions,
): Promise<UploadResult> {
const response = await http<{ reference: Reference }>(requestOptions, {
url: endpoint,
@@ -28,7 +37,7 @@
data,
headers: {
'content-type': 'application/octet-stream',
...extractUploadHeaders(postageBatchId, options),
...extractRedundantUploadHeaders(postageBatchId, options),
},
})

@@ -44,10 +53,15 @@
* @param ky
* @param hash Bee content reference
*/
export async function download(requestOptions: BeeRequestOptions, hash: ReferenceOrEns): Promise<Data> {
export async function download(
requestOptions: BeeRequestOptions,
hash: ReferenceOrEns,
options?: DownloadRedundancyOptions,
): Promise<Data> {
const response = await http<ArrayBuffer>(requestOptions, {
responseType: 'arraybuffer',
url: `${endpoint}/${hash}`,
headers: extractDownloadHeaders(options),
})

return wrapBytesWithHelpers(new Uint8Array(response.data))
@@ -62,10 +76,12 @@ export async function download(
export async function downloadReadable(
requestOptions: BeeRequestOptions,
hash: ReferenceOrEns,
options?: DownloadRedundancyOptions,
): Promise<ReadableStream<Uint8Array>> {
const response = await http<ReadableStream<Uint8Array>>(requestOptions, {
responseType: 'stream',
url: `${endpoint}/${hash}`,
headers: extractDownloadHeaders(options),
})

return response.data
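The module-level helpers forward the new options into request headers. A sketch of a byte round trip, written as it might sit inside the library's own src/ tree (these functions are not part of the public root export); the BeeRequestOptions shape with a baseURL and the batch ID are assumptions for illustration:

```ts
import * as bytes from './modules/bytes'
import { BatchId, BeeRequestOptions, RedundancyLevel, RedundancyStrategy } from './types'

const requestOptions: BeeRequestOptions = { baseURL: 'http://localhost:1633' }
const postageBatchId = '0'.repeat(64) as BatchId

async function roundTrip(): Promise<void> {
  // Upload with parity chunks added by the node.
  const { reference } = await bytes.upload(requestOptions, 'hello', postageBatchId, {
    redundancyLevel: RedundancyLevel.STRONG,
  })

  // Prefetch all chunks, skip the fallback cascade, and give each chunk ten seconds.
  const data = await bytes.download(requestOptions, reference, {
    redundancyStrategy: RedundancyStrategy.RACE,
    fallback: false,
    timeoutMs: 10_000,
  })

  console.log(data.text())
}

roundTrip()
```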
37 changes: 26 additions & 11 deletions src/modules/bzz.ts
@@ -4,17 +4,19 @@ import {
Collection,
CollectionUploadOptions,
Data,
DownloadRedundancyOptions,
FileData,
FileUploadOptions,
Readable,
Reference,
ReferenceOrEns,
UploadHeaders,
UploadRedundancyOptions,
UploadResult,
} from '../types'
import { wrapBytesWithHelpers } from '../utils/bytes'
import { assertCollection } from '../utils/collection'
import { extractUploadHeaders, readFileHeaders } from '../utils/headers'
import { extractDownloadHeaders, extractRedundantUploadHeaders, readFileHeaders } from '../utils/headers'
import { http } from '../utils/http'
import { isReadable } from '../utils/stream'
import { makeTar } from '../utils/tar'
@@ -27,8 +29,11 @@ interface FileUploadHeaders extends UploadHeaders {
'content-type'?: string
}

function extractFileUploadHeaders(postageBatchId: BatchId, options?: FileUploadOptions): FileUploadHeaders {
const headers: FileUploadHeaders = extractUploadHeaders(postageBatchId, options)
function extractFileUploadHeaders(
postageBatchId: BatchId,
options?: FileUploadOptions & UploadRedundancyOptions,
): FileUploadHeaders {
const headers: FileUploadHeaders = extractRedundantUploadHeaders(postageBatchId, options)

if (options?.size) headers['content-length'] = String(options.size)

@@ -51,10 +56,12 @@ export async function uploadFile(
data: string | Uint8Array | Readable | ArrayBuffer,
postageBatchId: BatchId,
name?: string,
options?: FileUploadOptions,
options?: FileUploadOptions & UploadRedundancyOptions,
): Promise<UploadResult> {
if (isReadable(data) && !options?.contentType) {
if (!options) options = {}
if (!options) {
options = {}
}
options.contentType = 'application/octet-stream'
}

@@ -86,11 +93,13 @@ export async function downloadFile(
requestOptions: BeeRequestOptions,
hash: ReferenceOrEns,
path = '',
options?: DownloadRedundancyOptions,
): Promise<FileData<Data>> {
const response = await http<ArrayBuffer>(requestOptions, {
method: 'GET',
responseType: 'arraybuffer',
url: `${bzzEndpoint}/${hash}/${path}`,
headers: extractDownloadHeaders(options),
})
const file = {
...readFileHeaders(response.headers as Record<string, string>),
@@ -111,11 +120,13 @@ export async function downloadFileReadable(
requestOptions: BeeRequestOptions,
hash: ReferenceOrEns,
path = '',
options?: DownloadRedundancyOptions,
): Promise<FileData<ReadableStream<Uint8Array>>> {
const response = await http<ReadableStream<Uint8Array>>(requestOptions, {
method: 'GET',
responseType: 'stream',
url: `${bzzEndpoint}/${hash}/${path}`,
headers: extractDownloadHeaders(options),
})
const file = {
...readFileHeaders(response.headers as Record<string, string>),
@@ -136,13 +147,17 @@ interface CollectionUploadHeaders extends UploadHeaders {

function extractCollectionUploadHeaders(
postageBatchId: BatchId,
options?: CollectionUploadOptions,
): CollectionUploadHeaders {
const headers: CollectionUploadHeaders = extractUploadHeaders(postageBatchId, options)
options?: CollectionUploadOptions & UploadRedundancyOptions,
): CollectionUploadHeaders & UploadRedundancyOptions {
const headers: CollectionUploadHeaders = extractRedundantUploadHeaders(postageBatchId, options)

if (options?.indexDocument) headers['swarm-index-document'] = options.indexDocument
if (options?.indexDocument) {
headers['swarm-index-document'] = options.indexDocument
}

if (options?.errorDocument) headers['swarm-error-document'] = options.errorDocument
if (options?.errorDocument) {
headers['swarm-error-document'] = options.errorDocument
}

return headers
}
@@ -158,7 +173,7 @@ export async function uploadCollection(
requestOptions: BeeRequestOptions,
collection: Collection<Uint8Array>,
postageBatchId: BatchId,
options?: CollectionUploadOptions,
options?: CollectionUploadOptions & UploadRedundancyOptions,
): Promise<UploadResult> {
assertCollection(collection)
const tarData = makeTar(collection)
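For the manifest path, here is a sketch of uploading a small in-memory collection through the module API above, with an index document, an error document and MEDIUM redundancy. The request-options shape, the { path, data } entry shape and the batch ID are assumptions for illustration:

```ts
import * as bzz from './modules/bzz'
import { BatchId, BeeRequestOptions, Collection, RedundancyLevel } from './types'

const requestOptions: BeeRequestOptions = { baseURL: 'http://localhost:1633' }
const postageBatchId = '0'.repeat(64) as BatchId

// A two-file website packed as a collection.
const collection: Collection<Uint8Array> = [
  { path: 'index.html', data: new TextEncoder().encode('<h1>hello</h1>') },
  { path: 'error.html', data: new TextEncoder().encode('<h1>not found</h1>') },
]

async function publish(): Promise<void> {
  const { reference } = await bzz.uploadCollection(requestOptions, collection, postageBatchId, {
    indexDocument: 'index.html',
    errorDocument: 'error.html',
    redundancyLevel: RedundancyLevel.MEDIUM,
  })

  console.log(`bzz://${reference}`)
}

publish()
```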
48 changes: 48 additions & 0 deletions src/types/index.ts
@@ -175,6 +175,54 @@ export interface UploadOptions {
deferred?: boolean
}

/**
* Add redundancy to the data being uploaded so that downloaders can download it with better UX.
* The default value 0 does not add any redundancy to the file.
*/
export enum RedundancyLevel {
OFF = 0,
MEDIUM = 1,
STRONG = 2,
INSANE = 3,
PARANOID = 4,
}

export interface UploadRedundancyOptions {
redundancyLevel?: RedundancyLevel
}

/**
* Specify the retrieve strategy on redundant data.
* The possible values are NONE, DATA, PROX and RACE.
* Strategy NONE means no prefetching takes place.
* Strategy DATA means only data chunks are prefetched.
* Strategy PROX means only chunks that are close to the node are prefetched.
* Strategy RACE means all chunks are prefetched: n data chunks and k parity chunks. The first n chunks to arrive are used to reconstruct the file.
* Multiple strategies can be used in a fallback cascade if the swarm redundancy fallback mode is set to true.
* The default is a fallback cascade: NONE, then DATA, falling back to PROX and finally RACE.
*/
export enum RedundancyStrategy {
NONE = 0,
DATA = 1,
PROX = 2,
RACE = 3,
}

export interface DownloadRedundancyOptions {
/**
* Specify the retrieve strategy on redundant data.
*/
redundancyStrategy?: RedundancyStrategy
/**
* Specify if the retrieve strategies (chunk prefetching on redundant data) are used in a fallback cascade. The default is true.
*/
fallback?: boolean
/**
* Specify the timeout for chunk retrieval. The default is 30 seconds.
*/
timeoutMs?: number
}

export interface FileUploadOptions extends UploadOptions {
/**
* Specifies Content-Length for the given data. It is required when uploading with Readable.
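A short sketch of the two new option shapes in use, assuming the enums and interfaces are re-exported from the package root; the enum members are the numeric values the client later sends in the swarm-* headers:

```ts
import {
  DownloadRedundancyOptions,
  RedundancyLevel,
  RedundancyStrategy,
  UploadRedundancyOptions,
} from '@ethersphere/bee-js'

// PARANOID (= 4) maximises the number of parity chunks the node generates.
const uploadOptions: UploadRedundancyOptions = { redundancyLevel: RedundancyLevel.PARANOID }

// RACE (= 3) prefetches every chunk and reconstructs from the first n to arrive;
// fallback: false disables the NONE, DATA, PROX, RACE cascade.
const downloadOptions: DownloadRedundancyOptions = {
  redundancyStrategy: RedundancyStrategy.RACE,
  fallback: false,
  timeoutMs: 15_000, // per-chunk retrieval timeout in milliseconds
}

// Equivalent, using the raw wire value for the strategy.
const rawDownloadOptions: DownloadRedundancyOptions = { redundancyStrategy: 3, fallback: false }

console.log(uploadOptions, downloadOptions, rawDownloadOptions)
```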
4 changes: 3 additions & 1 deletion src/utils/expose.ts
@@ -53,8 +53,10 @@ export {
getDepthForCapacity,
getStampCostInBzz,
getStampCostInPlur,
getStampMaximumCapacityBytes,
getStampEffectiveBytes,
getStampMaximumCapacityBytes,
getStampTtlSeconds,
getStampUsage,
} from './stamps'

export { approximateOverheadForRedundancyLevel, getRedundancyStat, getRedundancyStats } from './redundancy'
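The redundancy helpers land on the public Utils namespace re-exported in src/index.ts. Their implementation lives in src/utils/redundancy.ts, which is not rendered in this view, so the sketch below only confirms the exports and assumes nothing about their signatures:

```ts
import { Utils } from '@ethersphere/bee-js'

// The three new helpers are reachable via the Utils namespace export.
console.log(typeof Utils.approximateOverheadForRedundancyLevel) // 'function'
console.log(typeof Utils.getRedundancyStat) // 'function'
console.log(typeof Utils.getRedundancyStats) // 'function'
```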
49 changes: 44 additions & 5 deletions src/utils/headers.ts
@@ -1,4 +1,4 @@
import { BatchId, FileHeaders, UploadOptions } from '../types'
import { BatchId, DownloadRedundancyOptions, FileHeaders, UploadOptions, UploadRedundancyOptions } from '../types'
import { BeeError } from './error'

/**
@@ -53,13 +53,52 @@ export function extractUploadHeaders(
'swarm-postage-batch-id': postageBatchId,
}

if (options?.pin) headers['swarm-pin'] = String(options.pin)
if (options?.pin) {
headers['swarm-pin'] = String(options.pin)
}

if (options?.encrypt) {
headers['swarm-encrypt'] = String(options.encrypt)
}

if (options?.tag) {
headers['swarm-tag'] = String(options.tag)
}

if (typeof options?.deferred === 'boolean') {
headers['swarm-deferred-upload'] = options.deferred.toString()
}

return headers
}

if (options?.encrypt) headers['swarm-encrypt'] = String(options.encrypt)
export function extractRedundantUploadHeaders(
postageBatchId: BatchId,
options?: UploadOptions & UploadRedundancyOptions,
): Record<string, string> {
const headers = extractUploadHeaders(postageBatchId, options)

if (options?.tag) headers['swarm-tag'] = String(options.tag)
if (options?.redundancyLevel) {
headers['swarm-redundancy-level'] = String(options.redundancyLevel)
}

return headers
}

export function extractDownloadHeaders(options?: DownloadRedundancyOptions): Record<string, string> {
const headers: Record<string, string> = {}

if (typeof options?.deferred === 'boolean') headers['swarm-deferred-upload'] = options.deferred.toString()
if (options?.redundancyStrategy) {
headers['swarm-redundancy-strategy'] = String(options.redundancyStrategy)
}

if (options?.fallback === false) {
headers['swarm-redundancy-fallback-mode'] = 'false'
}

if (options?.timeoutMs !== undefined) {
headers['swarm-chunk-retrieval-timeout'] = String(options.timeoutMs)
}

return headers
}
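To make the header mapping concrete, here is roughly what the two helpers above return for typical options, exercised as the library's own tests might with relative imports from src/; the batch ID is a placeholder:

```ts
import { extractDownloadHeaders, extractRedundantUploadHeaders } from './utils/headers'
import { BatchId, RedundancyLevel, RedundancyStrategy } from './types'

const postageBatchId = '0'.repeat(64) as BatchId

// Expected keys: swarm-postage-batch-id, swarm-pin: 'true', swarm-redundancy-level: '4'
const uploadHeaders = extractRedundantUploadHeaders(postageBatchId, {
  pin: true,
  redundancyLevel: RedundancyLevel.PARANOID,
})

// Expected keys: swarm-redundancy-strategy: '3', swarm-redundancy-fallback-mode: 'false',
// swarm-chunk-retrieval-timeout: '5000'
const downloadHeaders = extractDownloadHeaders({
  redundancyStrategy: RedundancyStrategy.RACE,
  fallback: false,
  timeoutMs: 5000,
})

console.log(uploadHeaders, downloadHeaders)
```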