Skip to content

Commit

Permalink
Merge IndexFile to BootloaderFile
Browse files Browse the repository at this point in the history
  • Loading branch information
buptsb committed Jan 8, 2024
1 parent 0a70d5b commit cb44105
Show file tree
Hide file tree
Showing 3 changed files with 41 additions and 169 deletions.
110 changes: 12 additions & 98 deletions protobuf/gen/protobuf/v1/jpeg_file.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,18 +38,9 @@ export interface PbFilePointer {
resources: PbResourceURL[];
}

/**
 * Index over a file's data chunks. Could be a linked list: when the index
 * itself is too large for one resource it is split into segments chained
 * via `next`.
 */
export interface PbIndexFile {
  $type: "protobuf.v1.PbIndexFile";
  /** true when this is the last segment of the index chain */
  ended: boolean;
  /** pointers to the file's encoded data chunks */
  chunks: PbFilePointer[];
  /** next segment of the index file; undefined on the final segment */
  next: PbFilePointer | undefined;
}

export interface PbBootloaderFile {
$type: "protobuf.v1.PbBootloaderFile";
indexFileHead: PbFilePointer | undefined;
chunks: PbFilePointer[];
fileSize: number;
chunkSize: number;
fileName: string;
Expand Down Expand Up @@ -358,88 +349,10 @@ export const PbFilePointer = {

messageTypeRegistry.set(PbFilePointer.$type, PbFilePointer);

/** Returns a fresh PbIndexFile with every field at its proto default. */
function createBasePbIndexFile(): PbIndexFile {
  return { $type: "protobuf.v1.PbIndexFile", ended: false, chunks: [], next: undefined };
}

/**
 * ts-proto style codec for PbIndexFile: protobuf wire-format encode/decode
 * plus JSON and deep-partial conversion helpers.
 * NOTE(review): this looks like generated code — prefer regenerating over
 * hand-editing; confirm against the .proto source.
 */
export const PbIndexFile = {
  $type: "protobuf.v1.PbIndexFile" as const,

  /** Serializes `message` to wire format (fields 1-3), omitting defaults. */
  encode(message: PbIndexFile, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
    // field 1 (bool ended), tag 8 = (1<<3)|varint: skipped when false
    if (message.ended === true) {
      writer.uint32(8).bool(message.ended);
    }
    // field 2 (repeated chunks), tag 18 = (2<<3)|length-delimited
    for (const v of message.chunks) {
      PbFilePointer.encode(v!, writer.uint32(18).fork()).ldelim();
    }
    // field 3 (optional next), tag 26 = (3<<3)|length-delimited
    if (message.next !== undefined) {
      PbFilePointer.encode(message.next, writer.uint32(26).fork()).ldelim();
    }
    return writer;
  },

  /** Parses a PbIndexFile from wire format; unknown fields are skipped. */
  decode(input: _m0.Reader | Uint8Array, length?: number): PbIndexFile {
    const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input);
    let end = length === undefined ? reader.len : reader.pos + length;
    const message = createBasePbIndexFile();
    while (reader.pos < end) {
      const tag = reader.uint32();
      // field number is the tag shifted right past the 3 wire-type bits
      switch (tag >>> 3) {
        case 1:
          message.ended = reader.bool();
          break;
        case 2:
          message.chunks.push(PbFilePointer.decode(reader, reader.uint32()));
          break;
        case 3:
          message.next = PbFilePointer.decode(reader, reader.uint32());
          break;
        default:
          // unknown field: skip by wire type (low 3 bits of the tag)
          reader.skipType(tag & 7);
          break;
      }
    }
    return message;
  },

  /** Builds a PbIndexFile from a plain JSON object, applying defaults. */
  fromJSON(object: any): PbIndexFile {
    return {
      $type: PbIndexFile.$type,
      ended: isSet(object.ended) ? Boolean(object.ended) : false,
      chunks: Array.isArray(object?.chunks) ? object.chunks.map((e: any) => PbFilePointer.fromJSON(e)) : [],
      next: isSet(object.next) ? PbFilePointer.fromJSON(object.next) : undefined,
    };
  },

  /** Converts `message` to a plain JSON-serializable object. */
  toJSON(message: PbIndexFile): unknown {
    const obj: any = {};
    message.ended !== undefined && (obj.ended = message.ended);
    if (message.chunks) {
      obj.chunks = message.chunks.map((e) => e ? PbFilePointer.toJSON(e) : undefined);
    } else {
      obj.chunks = [];
    }
    message.next !== undefined && (obj.next = message.next ? PbFilePointer.toJSON(message.next) : undefined);
    return obj;
  },

  /** Builds a full PbIndexFile from a deep-partial object, filling defaults. */
  fromPartial<I extends Exact<DeepPartial<PbIndexFile>, I>>(object: I): PbIndexFile {
    const message = createBasePbIndexFile();
    message.ended = object.ended ?? false;
    message.chunks = object.chunks?.map((e) => PbFilePointer.fromPartial(e)) || [];
    message.next = (object.next !== undefined && object.next !== null)
      ? PbFilePointer.fromPartial(object.next)
      : undefined;
    return message;
  },
};

messageTypeRegistry.set(PbIndexFile.$type, PbIndexFile);

function createBasePbBootloaderFile(): PbBootloaderFile {
return {
$type: "protobuf.v1.PbBootloaderFile",
indexFileHead: undefined,
chunks: [],
fileSize: 0,
chunkSize: 0,
fileName: "",
Expand All @@ -453,8 +366,8 @@ export const PbBootloaderFile = {
$type: "protobuf.v1.PbBootloaderFile" as const,

encode(message: PbBootloaderFile, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
if (message.indexFileHead !== undefined) {
PbFilePointer.encode(message.indexFileHead, writer.uint32(10).fork()).ldelim();
for (const v of message.chunks) {
PbFilePointer.encode(v!, writer.uint32(10).fork()).ldelim();
}
if (message.fileSize !== 0) {
writer.uint32(16).uint32(message.fileSize);
Expand Down Expand Up @@ -485,7 +398,7 @@ export const PbBootloaderFile = {
const tag = reader.uint32();
switch (tag >>> 3) {
case 1:
message.indexFileHead = PbFilePointer.decode(reader, reader.uint32());
message.chunks.push(PbFilePointer.decode(reader, reader.uint32()));
break;
case 2:
message.fileSize = reader.uint32();
Expand Down Expand Up @@ -516,7 +429,7 @@ export const PbBootloaderFile = {
fromJSON(object: any): PbBootloaderFile {
return {
$type: PbBootloaderFile.$type,
indexFileHead: isSet(object.indexFileHead) ? PbFilePointer.fromJSON(object.indexFileHead) : undefined,
chunks: Array.isArray(object?.chunks) ? object.chunks.map((e: any) => PbFilePointer.fromJSON(e)) : [],
fileSize: isSet(object.fileSize) ? Number(object.fileSize) : 0,
chunkSize: isSet(object.chunkSize) ? Number(object.chunkSize) : 0,
fileName: isSet(object.fileName) ? String(object.fileName) : "",
Expand All @@ -528,8 +441,11 @@ export const PbBootloaderFile = {

toJSON(message: PbBootloaderFile): unknown {
const obj: any = {};
message.indexFileHead !== undefined &&
(obj.indexFileHead = message.indexFileHead ? PbFilePointer.toJSON(message.indexFileHead) : undefined);
if (message.chunks) {
obj.chunks = message.chunks.map((e) => e ? PbFilePointer.toJSON(e) : undefined);
} else {
obj.chunks = [];
}
message.fileSize !== undefined && (obj.fileSize = Math.round(message.fileSize));
message.chunkSize !== undefined && (obj.chunkSize = Math.round(message.chunkSize));
message.fileName !== undefined && (obj.fileName = message.fileName);
Expand All @@ -544,9 +460,7 @@ export const PbBootloaderFile = {

fromPartial<I extends Exact<DeepPartial<PbBootloaderFile>, I>>(object: I): PbBootloaderFile {
const message = createBasePbBootloaderFile();
message.indexFileHead = (object.indexFileHead !== undefined && object.indexFileHead !== null)
? PbFilePointer.fromPartial(object.indexFileHead)
: undefined;
message.chunks = object.chunks?.map((e) => PbFilePointer.fromPartial(e)) || [];
message.fileSize = object.fileSize ?? 0;
message.chunkSize = object.chunkSize ?? 0;
message.fileName = object.fileName ?? "";
Expand Down
13 changes: 3 additions & 10 deletions protobuf/v1/jpeg_file.proto
Original file line number Diff line number Diff line change
Expand Up @@ -31,16 +31,9 @@ message PbFilePointer {
repeated PbResourceURL resources = 4;
}

// Index over a file's data chunks. Could be a linked list: when the index
// itself is split into segments they are chained via `next`.
message PbIndexFile {
  // true when this is the last segment of the index chain
  required bool ended = 1;
  // pointers to the file's encoded data chunks
  repeated PbFilePointer chunks = 2;
  // next segment of the index file
  optional PbFilePointer next = 3;
}

message PbBootloaderFile {
required PbFilePointer index_file_head = 1;
repeated PbFilePointer chunks = 1;

required uint32 file_size = 2;
required uint32 chunk_size = 3;
required string file_name = 4;
Expand All @@ -51,4 +44,4 @@ message PbBootloaderFile {

// sha256 of the whole file
required bytes checksum = 7;
}
}
87 changes: 26 additions & 61 deletions src/file.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { UsedBits } from "./bits-manipulation";
import { sinkDelegate } from "./sinks";
import { ChunksHelper, cachedChunk, readRequest, BlockingQueue } from "./chunks";
import { CipherConfig, SinkDownloadConfig, SinkUploadConfig } from './config';
import { PbIndexFile, PbBootloaderFile, PbFilePointer, GenDescString, BootloaderDescription, ParseDescString } from "../protobuf";
import { PbBootloaderFile, PbFilePointer, GenDescString, BootloaderDescription, ParseDescString } from "../protobuf";
import { MessageType, messageTypeRegistry, UnknownMessage } from '../protobuf/gen/typeRegistry';
import { EncoderType, DecoderType, SinkType } from './common-types';
import { NewCipherConfigFromPassword } from "./encryption"
Expand Down Expand Up @@ -46,60 +46,10 @@ class RawDataFile extends BaseFile {
}
}

/**
 * Wraps a PbIndexFile message: download/parse an existing index via a
 * file pointer, or generate and upload a new index over a set of chunk
 * pointers.
 */
class IndexFile extends BaseFile {
  private log = debugLogger.extend('index');
  // decoded index message; set by CreateForDownload
  public indexFile: PbIndexFile;

  constructor() {
    super();
  }

  /** Downloads and decodes the index file that `indexFilePointer` refers to. */
  static async CreateForDownload(indexFilePointer: PbFilePointer, downloadConfig: SinkDownloadConfig) {
    const file = new IndexFile();
    file.indexFile = await file.download<PbIndexFile>(PbIndexFile, indexFilePointer, downloadConfig);
    file.log("create indexFilePointer/indexFile:", indexFilePointer, file.indexFile);
    return file;
  }

  /**
   * Builds a single-segment index (ended=true, no `next`) over `chunks`,
   * uploads it, and returns the pointer to the uploaded index.
   */
  async GenIndexFile(chunks: PbFilePointer[], uploadConfig: SinkUploadConfig) {
    const indexFile: PbIndexFile = {
      $type: PbIndexFile.$type,
      chunks: chunks,
      // TODO: support split index file into multiple files
      ended: true,
      next: undefined,
    }
    this.log("Gen from:", indexFile);
    const indexFilePtr = await this.upload(indexFile, uploadConfig, "indexFile");
    this.log("Gen result: ", indexFilePtr);
    return indexFilePtr;
  }

  /**
   * Downloads the chunks selected by `chunkIndexes` concurrently and emits
   * { decoded, index } items, where `index` is the chunk's position in the
   * whole file (not its position in the download sequence).
   */
  DownloadChunksWithWorkerPool(chunkIndexes: number[], downloadConfig: SinkDownloadConfig) {
    // nothing requested: emit one empty result so downstream pipes still run
    if (chunkIndexes.length === 0) {
      return rx.of({
        decoded: Buffer.alloc(0),
        index: 0,
      });
    }
    // NOTE(review): _.pullAt MUTATES this.indexFile.chunks (removes the
    // selected entries) — a second call over the same indexes would see a
    // shrunken list; confirm this method is only invoked once per instance.
    const targetChunks = _.pullAt(this.indexFile.chunks, chunkIndexes);
    return sinkDelegate.DownloadMultiple(targetChunks, downloadConfig).pipe(
      // from index number in download sequences to index number in the file chunks
      rx.map(chunk => {
        return {
          decoded: chunk.decoded,
          index: chunkIndexes[chunk.index],
        };
      })
    );
  }
}

class BootloaderFile extends BaseFile {
private log = debugLogger.extend('bootloader');

public blFile: PbBootloaderFile;
public indexFile: IndexFile;
public dataDownloadConfig: SinkDownloadConfig;
public helper: ChunksHelper;

Expand All @@ -111,14 +61,14 @@ class BootloaderFile extends BaseFile {
const file = new BootloaderFile();
const blFile = await file.download<PbBootloaderFile>(PbBootloaderFile, blFilePointer, downloadConfig);
const dataDownloadConfig = new SinkDownloadConfig(
UsedBits.fromString(blFile.indexFileHead!.usedBits), // usedBits
// UsedBits.fromString(blFile.usedBits), // usedBits
downloadConfig.usedBits,
new CipherConfig("aes-128-gcm", Buffer.from(blFile.aesKey), Buffer.from(blFile.aesIv)),
downloadConfig.concurrency,
DecoderType.wasmDecoder, // decoder
null, // abort signal
);
file.blFile = blFile;
file.indexFile = await IndexFile.CreateForDownload(blFile.indexFileHead!, dataDownloadConfig);
file.dataDownloadConfig = dataDownloadConfig;
file.log("create ptr/file/dataDownloadConfig", blFilePointer, blFile, dataDownloadConfig);
return file;
Expand All @@ -138,13 +88,13 @@ class BootloaderFile extends BaseFile {
aesKey: Uint8Array,
aesIv: Uint8Array,
checksum: Uint8Array,
indexFileHead: PbFilePointer,
chunks: PbFilePointer[],
uploadConfig: SinkUploadConfig,
blPassword: Uint8Array,
) {
const blFile: PbBootloaderFile = {
$type: PbBootloaderFile.$type,
indexFileHead,
chunks,
fileSize,
chunkSize,
fileName,
Expand Down Expand Up @@ -172,7 +122,7 @@ class BootloaderFile extends BaseFile {
}
const helper = new ChunksHelper(this.blFile.fileSize, this.blFile.chunkSize, requests);
this.log("requests/targetReadChunkIndexes", requests, helper.targetReadChunkIndexes);
return this.indexFile.DownloadChunksWithWorkerPool(
return this.DownloadChunksWithWorkerPool(
helper.targetReadChunkIndexes,
this.dataDownloadConfig,
).pipe(
Expand All @@ -182,6 +132,25 @@ class BootloaderFile extends BaseFile {
rx.mergeAll(),
);
}

  /**
   * Downloads the chunks selected by `chunkIndexes` concurrently and emits
   * { decoded, index } items, where `index` is the chunk's position in the
   * whole file (not its position in the download sequence).
   */
  DownloadChunksWithWorkerPool(chunkIndexes: number[], downloadConfig: SinkDownloadConfig) {
    // nothing requested: emit one empty result so downstream pipes still run
    if (chunkIndexes.length === 0) {
      return rx.of({
        decoded: Buffer.alloc(0),
        index: 0,
      });
    }
    // NOTE(review): _.pullAt MUTATES this.blFile.chunks (removes the
    // selected entries) — a second Read() on the same BootloaderFile would
    // see a shrunken chunk list; confirm single-use is intended.
    const targetChunks = _.pullAt(this.blFile.chunks, chunkIndexes);
    return sinkDelegate.DownloadMultiple(targetChunks, downloadConfig).pipe(
      // from index number in download sequences to index number in the file chunks
      rx.map(chunk => {
        return {
          decoded: chunk.decoded,
          index: chunkIndexes[chunk.index],
        };
      })
    );
  }
}

const realfs = fs;
Expand Down Expand Up @@ -282,10 +251,6 @@ export class UploadFile {
"chunks"
);

// step 2. create/upload index file(s)
const indexFile = new IndexFile();
const indexFileHead = await indexFile.GenIndexFile(filePtrs, this.dataUploadConfig);

// step 2. create/upload bootloader file
const bootloaderFile = new BootloaderFile()
const descStr = await bootloaderFile.GenDescription(
Expand All @@ -295,7 +260,7 @@ export class UploadFile {
this.aesKey,
this.aesIv,
this.checksum.digest(),
indexFileHead,
filePtrs,
this.blUploadConfig,
this.blPassword,
);
Expand Down

0 comments on commit cb44105

Please sign in to comment.