Merge branch 'master' into vincent/SDK-1636
vincent-dfinity authored Sep 3, 2024
2 parents 63bd235 + 77164f9 commit 50681a1
Showing 41 changed files with 218 additions and 105 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -42,6 +42,14 @@ This applies to the following:
[extension catalog](https://github.com/dfinity/dfx-extensions/blob/main/catalog.json).
The extension catalog can be overridden with the `--catalog-url` parameter.

## Dependencies

### Frontend canister

Added `create_chunks`. It has the same behavior as `create_chunk`, except that it takes a `vec blob` and returns a `vec ChunkId` instead of the non-`vec` variants.

Module hash: 3a533f511b3960b4186e76cf9abfbd8222a2c507456a66ec55671204ee70cae3
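
For illustration, a minimal Rust sketch of the new argument shape, using the types added to `ic-certified-assets` (the import path is an assumption; `Encode!` comes from the `candid` crate):

```rust
use candid::{Encode, Nat};
use serde_bytes::ByteBuf;
// Assumed import path; inside the canister crate this is `crate::types::CreateChunksArg`.
use ic_certified_assets::types::CreateChunksArg;

fn encode_create_chunks_arg() -> Vec<u8> {
    // One batch id plus several blobs in a single call; the canister replies
    // with one ChunkId per blob, in the same order.
    let arg = CreateChunksArg {
        batch_id: Nat::from(1_u64),
        content: vec![
            ByteBuf::from(vec![0u8; 1024]),
            ByteBuf::from(vec![1u8; 1024]),
        ],
    };
    Encode!(&arg).expect("candid encoding should succeed")
}
```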

# 0.23.0

### feat: Add canister snapshots
27 changes: 26 additions & 1 deletion docs/design/asset-canister-interface.md
@@ -267,7 +267,7 @@ Required Permission: [Prepare](#permission-prepare)
create_chunk: (
record {
batch_id: BatchId;
content: blob
}
) -> (record {
chunk_id: ChunkId
@@ -286,6 +286,31 @@ Preconditions:

Required Permission: [Prepare](#permission-prepare)

### Method: `create_chunks`

```candid
create_chunks: (
record {
batch_id: BatchId;
content: vec blob;
}
) -> (record {
  chunk_ids: vec ChunkId
});
```

This method stores a number of chunks and extends the batch expiry time.

When creating chunks for a given content encoding, all chunks except the last must have the same size.

The asset canister must retain all data related to a batch for at least the [Minimum Batch Retention Duration](#constant-minimum-batch-retention-duration) after creating chunks in a batch.

Preconditions:
- The batch exists.
- Creation of the chunks would not exceed chunk creation limits.

Required Permission: [Prepare](#permission-prepare)
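
To satisfy the equal-size rule above, a caller typically splits the encoded content into fixed-size pieces before invoking `create_chunks`. A minimal sketch in Rust; `MAX_CHUNK_SIZE` and `split_into_chunks` are illustrative names, not part of this interface:

```rust
use serde_bytes::ByteBuf;

// Example size only; the interface does not mandate a particular chunk size.
const MAX_CHUNK_SIZE: usize = 1_900_000;

/// Every piece except the last has length `MAX_CHUNK_SIZE`; the last piece
/// holds the remainder.
fn split_into_chunks(content: &[u8]) -> Vec<ByteBuf> {
    content
        .chunks(MAX_CHUNK_SIZE)
        .map(|piece| ByteBuf::from(piece.to_vec()))
        .collect()
}
```

The resulting `vec blob` becomes the `content` field of the `create_chunks` argument, and the returned `chunk_ids` correspond one-to-one, in order, to the submitted blobs.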

### Method: `commit_batch`

```candid
1 change: 1 addition & 0 deletions src/canisters/frontend/ic-certified-assets/assets.did
@@ -199,6 +199,7 @@ service: (asset_canister_args: opt AssetCanisterArgs) -> {
create_batch : (record {}) -> (record { batch_id: BatchId });

create_chunk: (record { batch_id: BatchId; content: blob }) -> (record { chunk_id: ChunkId });
create_chunks: (record { batch_id: BatchId; content: vec blob }) -> (record { chunk_ids: vec ChunkId });

// Perform all operations successfully, or reject
commit_batch: (CommitBatchArguments) -> ();
9 changes: 9 additions & 0 deletions src/canisters/frontend/ic-certified-assets/src/lib.rs
@@ -167,6 +167,15 @@ fn create_chunk(arg: CreateChunkArg) -> CreateChunkResponse {
})
}

#[update(guard = "can_prepare")]
#[candid_method(update)]
fn create_chunks(arg: CreateChunksArg) -> CreateChunksResponse {
STATE.with(|s| match s.borrow_mut().create_chunks(arg, time()) {
Ok(chunk_ids) => CreateChunksResponse { chunk_ids },
Err(msg) => trap(&msg),
})
}

#[update(guard = "can_commit")]
#[candid_method(update)]
fn create_asset(arg: CreateAssetArguments) {
62 changes: 46 additions & 16 deletions src/canisters/frontend/ic-certified-assets/src/state_machine.rs
@@ -582,43 +582,73 @@ impl State {
}

pub fn create_chunk(&mut self, arg: CreateChunkArg, now: u64) -> Result<ChunkId, String> {
let ids = self.create_chunks_helper(arg.batch_id, vec![arg.content], now)?;
ids.into_iter()
.next()
.ok_or_else(|| "Bug: created chunk did not return a chunk id.".to_string())
}

pub fn create_chunks(
&mut self,
CreateChunksArg {
batch_id,
content: chunks,
}: CreateChunksArg,
now: u64,
) -> Result<Vec<ChunkId>, String> {
self.create_chunks_helper(batch_id, chunks, now)
}

/// Post-condition: `chunks.len() == output_chunk_ids.len()`
fn create_chunks_helper(
&mut self,
batch_id: Nat,
chunks: Vec<ByteBuf>,
now: u64,
) -> Result<Vec<ChunkId>, String> {
if let Some(max_chunks) = self.configuration.max_chunks {
if self.chunks.len() + 1 > max_chunks as usize {
if self.chunks.len() + chunks.len() > max_chunks as usize {
return Err("chunk limit exceeded".to_string());
}
}
if let Some(max_bytes) = self.configuration.max_bytes {
let current_total_bytes = &self.batches.iter().fold(0, |acc, (_batch_id, batch)| {
acc + batch.chunk_content_total_size
});

if current_total_bytes + arg.content.as_ref().len() > max_bytes as usize {
let new_bytes: usize = chunks.iter().map(|chunk| chunk.len()).sum();
if current_total_bytes + new_bytes > max_bytes as usize {
return Err("byte limit exceeded".to_string());
}
}
let batch = self
.batches
.get_mut(&arg.batch_id)
.get_mut(&batch_id)
.ok_or_else(|| "batch not found".to_string())?;
if batch.commit_batch_arguments.is_some() {
return Err("batch has been proposed".to_string());
}

batch.expires_at = Int::from(now + BATCH_EXPIRY_NANOS);

let chunk_id = self.next_chunk_id.clone();
self.next_chunk_id += 1_u8;
batch.chunk_content_total_size += arg.content.as_ref().len();

self.chunks.insert(
chunk_id.clone(),
Chunk {
batch_id: arg.batch_id,
content: RcBytes::from(arg.content),
},
);
let chunks_len = chunks.len();

let mut chunk_ids = Vec::with_capacity(chunks.len());
for chunk in chunks {
let chunk_id = self.next_chunk_id.clone();
self.next_chunk_id += 1_u8;
batch.chunk_content_total_size += chunk.len();
self.chunks.insert(
chunk_id.clone(),
Chunk {
batch_id: batch_id.clone(),
content: RcBytes::from(chunk),
},
);
chunk_ids.push(chunk_id);
}

Ok(chunk_id)
debug_assert!(chunks_len == chunk_ids.len());
Ok(chunk_ids)
}

pub fn commit_batch(&mut self, arg: CommitBatchArguments, now: u64) -> Result<(), String> {
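
A hedged sketch of how this new path is driven against the state machine, mirroring the tests further down (`state` and `time_now` are assumed to be set up exactly as in the existing test module; the `State` import path is an assumption):

```rust
use crate::state_machine::State; // path assumed
use crate::types::CreateChunksArg;
use serde_bytes::ByteBuf;

fn upload_two_chunks(state: &mut State, time_now: u64) {
    let batch_id = state.create_batch(time_now).unwrap();
    let chunk_ids = state
        .create_chunks(
            CreateChunksArg {
                batch_id,
                content: vec![
                    ByteBuf::from(b"first".to_vec()),
                    ByteBuf::from(b"second".to_vec()),
                ],
            },
            time_now,
        )
        .unwrap();
    // Post-condition of `create_chunks_helper`: one ChunkId per submitted blob.
    assert_eq!(chunk_ids.len(), 2);
}
```
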
56 changes: 43 additions & 13 deletions src/canisters/frontend/ic-certified-assets/src/tests.rs
@@ -9,6 +9,7 @@ use crate::types::{
SetAssetPropertiesArguments,
};
use crate::url_decode::{url_decode, UrlDecodeError};
use crate::CreateChunksArg;
use candid::{Nat, Principal};
use ic_certification_testing::CertificateBuilder;
use ic_crypto_tree_hash::Digest;
@@ -724,14 +725,24 @@ fn cannot_create_chunk_in_proposed_batch_() {
const BODY: &[u8] = b"<!DOCTYPE html><html></html>";
match state.create_chunk(
CreateChunkArg {
batch_id: batch_1,
batch_id: batch_1.clone(),
content: ByteBuf::from(BODY.to_vec()),
},
time_now,
) {
Err(err) if err == *"batch has been proposed" => {}
other => panic!("expected batch already proposed error, got: {:?}", other),
}
match state.create_chunks(
CreateChunksArg {
batch_id: batch_1,
content: vec![ByteBuf::from(BODY.to_vec())],
},
time_now,
) {
Err(err) if err == *"batch has been proposed" => {}
other => panic!("expected batch already proposed error, got: {:?}", other),
}
}

#[test]
@@ -3765,6 +3776,18 @@ mod enforce_limits {
time_now,
)
.unwrap();
assert_eq!(
state
.create_chunks(
CreateChunksArg {
batch_id: batch_2.clone(),
content: vec![ByteBuf::new(), ByteBuf::new()]
},
time_now
)
.unwrap_err(),
"chunk limit exceeded"
);
state
.create_chunk(
CreateChunkArg {
@@ -3818,20 +3841,27 @@

let batch_1 = state.create_batch(time_now).unwrap();
let batch_2 = state.create_batch(time_now).unwrap();
assert_eq!(
state
.create_chunks(
CreateChunksArg {
batch_id: batch_1.clone(),
content: vec![
ByteBuf::from(c0.clone()),
ByteBuf::from(c1.clone()),
ByteBuf::from(c2.clone())
]
},
time_now
)
.unwrap_err(),
"byte limit exceeded"
);
state
.create_chunk(
CreateChunkArg {
.create_chunks(
CreateChunksArg {
batch_id: batch_1.clone(),
content: ByteBuf::from(c0),
},
time_now,
)
.unwrap();
state
.create_chunk(
CreateChunkArg {
batch_id: batch_2.clone(),
content: ByteBuf::from(c1),
content: vec![ByteBuf::from(c0), ByteBuf::from(c1)],
},
time_now,
)
11 changes: 11 additions & 0 deletions src/canisters/frontend/ic-certified-assets/src/types.rs
@@ -134,6 +134,17 @@ pub struct CreateChunkResponse {
pub chunk_id: ChunkId,
}

#[derive(Clone, Debug, CandidType, Deserialize)]
pub struct CreateChunksArg {
pub batch_id: BatchId,
pub content: Vec<ByteBuf>,
}

#[derive(Clone, Debug, CandidType, Deserialize)]
pub struct CreateChunksResponse {
pub chunk_ids: Vec<ChunkId>,
}

#[derive(Clone, Debug, CandidType, Deserialize, PartialEq, Eq)]
pub struct AssetProperties {
pub max_age: Option<u64>,
2 changes: 1 addition & 1 deletion src/dfx/assets/project_templates/any_js/package.json
@@ -1,5 +1,5 @@
{
"name": "{project_name}",
"name": "__project_name__",
"type": "module",
"scripts": {
"start": "npm start --workspaces --if-present",
10 changes: 5 additions & 5 deletions src/dfx/assets/project_templates/azle/dfx.json-patch
@@ -1,13 +1,13 @@
[
{
"path": "/canisters/{backend_name}",
"path": "/canisters/__backend_name__",
"op": "add",
"value": {
"type": "custom",
"main": "src/{backend_name}/src/index.ts",
"candid": "src/{backend_name}/{backend_name}.did",
"build": "npx azle {backend_name}",
"wasm": ".azle/{backend_name}/{backend_name}.wasm",
"main": "src/__backend_name__/src/index.ts",
"candid": "src/__backend_name__/__backend_name__.did",
"build": "npx azle __backend_name__",
"wasm": ".azle/__backend_name__/__backend_name__.wasm",
"gzip": true,
"tech_stack": {
"language": {
2 changes: 1 addition & 1 deletion src/dfx/assets/project_templates/azle/package.json-patch
@@ -2,6 +2,6 @@
{
"op": "add",
"path": "/workspaces/-",
"value": "src/{backend_name}"
"value": "src/__backend_name__"
}
]
@@ -1,5 +1,5 @@
{
"name": "{backend_name}",
"name": "__backend_name__",
"version": "0.0.0",
"private": true,
"type": "module",
8 changes: 4 additions & 4 deletions src/dfx/assets/project_templates/base/README.md
@@ -1,18 +1,18 @@
# `{project_name}`
# `__project_name__`

Welcome to your new `{project_name}` project and to the Internet Computer development community. By default, creating a new project adds this README and some template files to your project directory. You can edit these template files to customize your project and to include your own code to speed up the development cycle.
Welcome to your new `__project_name__` project and to the Internet Computer development community. By default, creating a new project adds this README and some template files to your project directory. You can edit these template files to customize your project and to include your own code to speed up the development cycle.

To get started, you might want to explore the project directory structure and the default configuration file. Working with this project in your development environment will not affect any production deployment or identity tokens.

To learn more before you start working with `{project_name}`, see the following documentation available online:
To learn more before you start working with `__project_name__`, see the following documentation available online:

- [Quick Start](https://internetcomputer.org/docs/current/developer-docs/setup/deploy-locally)
- [SDK Developer Tools](https://internetcomputer.org/docs/current/developer-docs/setup/install)

If you want to start working on your project right away, you might want to try the following commands:

```bash
cd {project_name}/
cd __project_name__/
dfx help
dfx canister --help
```
10 changes: 5 additions & 5 deletions src/dfx/assets/project_templates/kybra/dfx.json-patch
@@ -1,13 +1,13 @@
[
{
"path": "/canisters/{backend_name}",
"path": "/canisters/__backend_name__",
"op": "add",
"value": {
"type": "custom",
"build": "python -m kybra {backend_name} src/{backend_name}/src/main.py src/{backend_name}/{backend_name}.did",
"post_install": ".kybra/{backend_name}/post_install.sh",
"candid": "src/{backend_name}/{backend_name}.did",
"wasm": ".kybra/{backend_name}/{backend_name}.wasm",
"build": "python -m kybra __backend_name__ src/__backend_name__/src/main.py src/__backend_name__/__backend_name__.did",
"post_install": ".kybra/__backend_name__/post_install.sh",
"candid": "src/__backend_name__/__backend_name__.did",
"wasm": ".kybra/__backend_name__/__backend_name__.wasm",
"gzip": true,
"tech_stack": {
"language": {
4 changes: 2 additions & 2 deletions src/dfx/assets/project_templates/motoko/dfx.json-patch
@@ -1,10 +1,10 @@
[
{
"op": "add",
"path": "/canisters/{backend_name}",
"path": "/canisters/__backend_name__",
"value": {
"type": "motoko",
"main": "src/{backend_name}/main.mo"
"main": "src/__backend_name__/main.mo"
}
}
]
8 changes: 4 additions & 4 deletions src/dfx/assets/project_templates/react/dfx.json-patch
@@ -1,16 +1,16 @@
[
{
"op": "add",
"path": "/canisters/{frontend_name}",
"path": "/canisters/__frontend_name__",
"value": {
"type": "assets",
"source": [
"src/{frontend_name}/dist"
"src/__frontend_name__/dist"
],
"dependencies": [
"{backend_name}"
"__backend_name__"
],
"workspace": "{frontend_name}"
"workspace": "__frontend_name__"
}
}
]