Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/stream resource from frontend #324

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
46 commits
Select commit Hold shift + click to select a range
d6c7321
Fix existing highlighting and add orange highlight for previously modified
May 24, 2023
e8606b5
add range into get_data request and test
May 25, 2023
beaf8bd
lint
May 25, 2023
711fbb7
replace all data.byteLength with dataLenPromise
May 30, 2023
cbb9908
Lazily grabs data but resets the address to 0
May 30, 2023
6541625
Append new data to existing data
May 30, 2023
a5d96e1
Use window for data, rapid requests at edges
May 30, 2023
d5718d3
lint
May 30, 2023
cf7d3cd
Remove dataPromise from App.svelte
May 30, 2023
8795db7
20k data requests feels good
May 30, 2023
97751a1
Merge branch 'master' of github.com:dannyp303/ofrak into feature/larg…
May 30, 2023
fda107f
Some changes from a different PR made it in
May 30, 2023
6c358ae
Missed one
May 30, 2023
c280477
Fix failing test
May 30, 2023
7b81ad6
simple changes per PR
Jun 5, 2023
037e4d1
lint
Jun 5, 2023
f2394ef
Call get_data_length in JumpToOffset
Jun 5, 2023
8935d97
Add window padding
Jun 5, 2023
63d4539
Undo JumpToOffset change to push to separate PR
Jun 5, 2023
e9bc52a
Chunk data to upload large files
Jun 7, 2023
e0541d1
remove data from dict after load
Jun 7, 2023
d73155c
add addr to query to avoid out of order errors
Jun 7, 2023
d9bee61
Merge branch 'master' into feature/stream-resource-from-frontend
Jun 7, 2023
7d59d2a
Merge branch 'master' into feature/stream-resource-from-frontend
dannyp303 Jun 7, 2023
b31757a
Merge branch 'master' into feature/stream-resource-from-frontend
Jun 7, 2023
974a005
Merge branch 'feature/stream-resource-from-frontend' of github.com:da…
Jun 7, 2023
9042fbb
Fix typing issue
Jun 7, 2023
34ec277
query type for addr, check None
Jun 7, 2023
304fd9d
Fix mypy
Jun 7, 2023
c244475
Remove debug prints
Jun 7, 2023
5011fe2
Add back original create_root_resource_method and check size in frontend
Jun 7, 2023
08cd92b
Add check in frontend
Jun 7, 2023
e50e4fa
add test
Jun 7, 2023
3f88e16
Merge branch 'master' into feature/stream-resource-from-frontend
Jun 7, 2023
4fad791
Merge branch 'master' into feature/stream-resource-from-frontend
Jun 14, 2023
6c3a2f1
Changelog update
Jun 14, 2023
5c15d56
Collect chunk promises and send at once
Jun 14, 2023
bb23148
Index chunked data by resource id
Jun 14, 2023
9e51c9d
make mypy smile
Jun 15, 2023
9ce5929
update test
Jun 15, 2023
046a02b
Addressing Jacobs review
Jun 15, 2023
db71436
Merge branch 'master' into feature/stream-resource-from-frontend
rbs-jacob Jun 19, 2023
c6fbb79
Add large file to test
Jun 19, 2023
7a5e916
update int.to_bytes for python3.7
Jun 19, 2023
afea4bf
Update ofrak_core/test_ofrak/unit/test_ofrak_server.py
dannyp303 Jun 19, 2023
60eb2f0
Make file bigger
Jun 19, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
48 changes: 40 additions & 8 deletions frontend/src/StartView.svelte
Expand Up @@ -121,8 +121,21 @@
tryHash = !!window.location.hash;
let mouseX, selectedAnimal;
const warnFileSize = 250 * 1024 * 1024;
const fileChunkSize = warnFileSize;

// Upload one fileChunkSize-sized slice of file `f`, starting at byte
// `start`, to the backend chunk endpoint. The final chunk may be shorter
// when the file size is not a multiple of fileChunkSize.
async function sendChunk(id, f, start) {
  let end = Math.min(start + fileChunkSize, f.size);
  // Blob.slice() is synchronous and returns a Blob — no await needed.
  const response = await fetch(
    `${$settings.backendUrl}/root_resource_chunk?id=${id}&start=${start}&end=${end}`,
    {
      method: "POST",
      body: f.slice(start, end),
    }
  );
  // A silently dropped chunk would leave a hole of zero bytes in the
  // server-side staging buffer; surface the failure so the caller's
  // Promise.all rejects instead of assembling a corrupt file.
  if (!response.ok) {
    throw new Error(
      `Failed to upload chunk [${start}, ${end}): HTTP ${response.status}`
    );
  }
}

async function createRootResource(f) {
let rootModel;
if (
f.size > warnFileSize &&
!window.confirm(
Expand All @@ -134,15 +147,34 @@
showRootResource = false;
return;
}
if (f.size > warnFileSize) {
let id = await fetch(
`${$settings.backendUrl}/init_chunked_root_resource?name=${f.name}&size=${f.size}`,
{ method: "POST" }
).then((r) => r.json());
let chunkStartAddrs = Array.from(
{ length: Math.ceil(f.size / fileChunkSize) },
(v, i) => i * fileChunkSize
);
await Promise.all(
chunkStartAddrs.map((start) => sendChunk(id, f, start))
);

const rootModel = await fetch(
`${$settings.backendUrl}/create_root_resource?name=${f.name}`,
{
method: "POST",
body: await f.arrayBuffer(),
}
).then((r) => r.json());

rootModel = await fetch(
`${$settings.backendUrl}/create_chunked_root_resource?id=${id}&name=${f.name}`,
{
method: "POST",
}
).then((r) => r.json());
} else {
rootModel = await fetch(
`${$settings.backendUrl}/create_root_resource?name=${f.name}`,
{
method: "POST",
body: await f.arrayBuffer(),
}
).then((r) => r.json());
}
rootResource = remote_model_to_resource(rootModel, resources);
$selected = rootModel.id;
}
Expand Down
1 change: 1 addition & 0 deletions ofrak_core/CHANGELOG.md
Expand Up @@ -8,6 +8,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Add a JFFS2 packer and unpacker. ([#326](https://github.com/redballoonsecurity/ofrak/pull/326))

### Changed
- Support uploading files in chunks to handle files larger than 2GB from the GUI ([#324](https://github.com/redballoonsecurity/ofrak/pull/324))

### Fixed

Expand Down
64 changes: 64 additions & 0 deletions ofrak_core/ofrak/gui/server.py
Expand Up @@ -144,9 +144,13 @@ def __init__(
self.resource_view_context: ResourceViewContext = ResourceViewContext()
self.component_context: ComponentContext = ClientComponentContext()
self.script_builder: ScriptBuilder = ScriptBuilder()
self.resource_builder: Dict[str, Tuple[Resource, memoryview]] = {}
self._app.add_routes(
[
web.post("/create_root_resource", self.create_root_resource),
web.post("/init_chunked_root_resource", self.init_chunked_root_resource),
web.post("/root_resource_chunk", self.root_resource_chunk),
web.post("/create_chunked_root_resource", self.create_chunked_root_resource),
web.get("/get_root_resources", self.get_root_resources),
web.get("/{resource_id}/", self.get_resource),
web.get("/{resource_id}/get_data", self.get_data),
Expand Down Expand Up @@ -260,6 +264,66 @@ async def run_until_cancelled(self): # pragma: no cover
finally:
await self.runner.cleanup()

@exceptions_to_http(SerializedError)
async def init_chunked_root_resource(self, request: Request) -> Response:
    """
    Begin a chunked root-resource upload.

    Creates an empty root resource and a zero-filled staging buffer of
    ``size`` bytes that subsequent ``/root_resource_chunk`` requests fill in.

    :param request: must carry ``name`` and ``size`` query parameters.
    :return: JSON string of the new resource's id (hex).
    :raises HTTPBadRequest: if ``name`` or ``size`` is missing, or ``size``
        is negative.
    """
    name = request.query.get("name")
    size_param = request.query.get("size")
    if name is None:
        raise HTTPBadRequest(reason="Missing resource name from request")
    if size_param is None:
        raise HTTPBadRequest(reason="Missing chunk size from request")
    size = int(size_param)
    if size < 0:
        raise HTTPBadRequest(reason="Resource size must be non-negative")
    root_resource: Resource = await self._ofrak_context.create_root_resource(name, b"", (File,))
    # bytearray(size) is already zero-filled; this avoids materializing a
    # temporary `size`-byte bytes object (b"\x00" * size) just to copy it,
    # which matters for the multi-GB uploads this endpoint exists to serve.
    self.resource_builder[root_resource.get_id().hex()] = (
        root_resource,
        memoryview(bytearray(size)),
    )
    return json_response(root_resource.get_id().hex())

@exceptions_to_http(SerializedError)
async def root_resource_chunk(self, request: Request) -> Response:
    """
    Receive one chunk of a chunked root-resource upload and copy it into
    the staging buffer created by ``/init_chunked_root_resource``.

    :param request: must carry ``id``, ``start`` and ``end`` query
        parameters; the request body is the raw chunk bytes for the
        half-open range ``[start, end)``.
    :raises HTTPBadRequest: on a missing parameter, an unknown upload id,
        or a body whose length does not match ``end - start``.
    """
    resource_id = request.query.get("id")
    start_param = request.query.get("start")
    end_param = request.query.get("end")
    if resource_id is None:
        raise HTTPBadRequest(reason="Missing resource id from request")
    if start_param is None:
        raise HTTPBadRequest(reason="Missing chunk start from request")
    if end_param is None:
        raise HTTPBadRequest(reason="Missing chunk end from request")
    start = int(start_param)
    end = int(end_param)
    if resource_id not in self.resource_builder:
        # Without this check an unknown id surfaces as a KeyError (HTTP
        # 500); it is a client error and should be reported as one.
        raise HTTPBadRequest(reason=f"No chunked upload in progress for resource {resource_id}")
    chunk_data = await request.read()
    if len(chunk_data) != end - start:
        # memoryview slice assignment requires an exact size match; give a
        # clear client error rather than an opaque ValueError.
        raise HTTPBadRequest(
            reason=f"Chunk length {len(chunk_data)} does not match range [{start}, {end})"
        )
    _, data = self.resource_builder[resource_id]
    data[start:end] = chunk_data
    return json_response([])

@exceptions_to_http(SerializedError)
async def create_chunked_root_resource(self, request: Request) -> Response:
    """
    Finalize a chunked upload: patch the assembled staging buffer into the
    root resource created by ``/init_chunked_root_resource``, record the
    action in the script builder, and return the serialized resource model.

    :param request: must carry ``id`` and ``name`` query parameters.
    :raises HTTPBadRequest: on a missing parameter or an unknown upload id.
    """
    resource_id = request.query.get("id")
    name = request.query.get("name")
    # Raise (not return) the 400s so @exceptions_to_http serializes them
    # consistently with the other handlers in this class.
    if resource_id is None:
        raise HTTPBadRequest(reason="Missing root resource `id` from request")
    if name is None:
        raise HTTPBadRequest(reason="Missing root resource `name` from request")
    if resource_id not in self.resource_builder:
        # Previously an unknown id raised KeyError inside the try-block,
        # and the except-clause then hit an unbound `root_resource`.
        raise HTTPBadRequest(reason=f"No chunked upload in progress for resource {resource_id}")

    root_resource, data = self.resource_builder[resource_id]
    try:
        script_str = rf"""
    if root_resource is None:
        root_resource = await ofrak_context.create_root_resource_from_file("{name}")"""
        root_resource.queue_patch(Range(0, 0), bytearray(data))
        await root_resource.save()
        await self.script_builder.add_action(root_resource, script_str, ActionType.UNPACK)
        if request.remote is not None:
            self._job_ids[request.remote] = root_resource.get_job_id()
        await self.script_builder.commit_to_script(root_resource)
    except Exception as e:
        await self.script_builder.clear_script_queue(root_resource)
        raise e
    finally:
        # Always release the staging buffer: on failure it would otherwise
        # leak a buffer as large as the uploaded file.
        self.resource_builder.pop(resource_id)
    return json_response(self._serialize_resource(root_resource))

@exceptions_to_http(SerializedError)
async def create_root_resource(self, request: Request) -> Response:
name = request.query.get("name")
Expand Down
37 changes: 37 additions & 0 deletions ofrak_core/test_ofrak/unit/test_ofrak_server.py
@@ -1,6 +1,9 @@
import itertools
import json
import os
import tempfile
from ofrak.ofrak_context import OFRAKContext
from ofrak.resource import Resource
import pytest
import re
import sys
Expand All @@ -26,6 +29,14 @@ def hello_world_elf() -> bytes:
return hello_elf()


@pytest.fixture()
async def large_test_file(ofrak_context: OFRAKContext) -> Resource:
    """
    Yield a 256 MiB root resource: 256 runs of 1 MiB, where run ``i``
    consists entirely of byte value ``i`` — large enough to exercise the
    chunked-upload path, with easily verifiable content.
    """
    with tempfile.NamedTemporaryFile() as temp:
        for byte_value in range(256):
            temp.write(bytes([byte_value]) * 1024 * 1024)
        # Flush Python's userspace I/O buffer before the file is re-opened
        # by name; otherwise the buffered tail may not yet be on disk and
        # the resource would be created from a truncated file.
        temp.flush()
        yield await ofrak_context.create_root_resource_from_file(temp.name)


@pytest.fixture(scope="session")
def firmware_zip() -> bytes:
assets_dir = os.path.abspath(
Expand Down Expand Up @@ -119,6 +130,32 @@ async def test_create_root_resource(
assert body["tags"] == json_result["tags"]


async def test_create_chunked_root_resource(
    ofrak_client: TestClient, ofrak_server, large_test_file
):
    """
    End-to-end test of the chunked upload flow: init the staging buffer,
    upload the file in ten chunks, finalize, then verify the server-side
    data length matches the original file.
    """
    test_file_data = await large_test_file.get_data()
    chunk_size = len(test_file_data) // 10
    init_resp = await ofrak_client.post(
        "/init_chunked_root_resource",
        params={"name": "test_file_data", "size": len(test_file_data)},
    )
    assert init_resp.status == 200
    resource_id = await init_resp.json()
    for start in range(0, len(test_file_data), chunk_size):
        end = min(start + chunk_size, len(test_file_data))
        chunk_resp = await ofrak_client.post(
            "/root_resource_chunk",
            params={"id": resource_id, "start": start, "end": end},
            data=test_file_data[start:end],
        )
        # Fail immediately on a bad chunk rather than letting the problem
        # show up later as a confusing length mismatch.
        assert chunk_resp.status == 200
    create_resp = await ofrak_client.post(
        "/create_chunked_root_resource",
        params={"name": "test_file_data", "id": resource_id},
    )
    assert create_resp.status == 200
    length_resp = await ofrak_client.get(f"/{resource_id}/get_data_length")
    length_resp_body = await length_resp.json()
    assert length_resp_body == len(test_file_data)


async def test_get_root_resources(
ofrak_client: TestClient, ofrak_context, ofrak_server, hello_world_elf
):
Expand Down