From 5bddd28b0c58c50626ff4dd2aa3d9f0ca3c1d583 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 13 Apr 2024 22:25:39 +0300 Subject: [PATCH 001/354] dp[endency times: partly done --- src/dfx/src/lib/models/canister.rs | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 5884195458..1a77e79d3a 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -21,6 +21,7 @@ use ic_wasm::metadata::{add_metadata, remove_metadata, Kind}; use ic_wasm::optimize::OptLevel; use itertools::Itertools; use petgraph::graph::{DiGraph, NodeIndex}; +use petgraph::visit::Bfs; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; @@ -702,20 +703,38 @@ impl CanisterPool { }; BuildError::DependencyError(format!("Found circular dependency: {}", message)) })?; - let order: Vec = nodes + let order: Vec<(&NodeIndex, CanisterId)> = nodes .iter() .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. 
- .map(|idx| *graph.node_weight(*idx).unwrap()) + .map(|idx| (idx, *graph.node_weight(*idx).unwrap())) .collect(); let canisters_to_build = self.canisters_to_build(build_config); let mut result = Vec::new(); - for canister_id in &order { + for (&idx, canister_id) in &order { if let Some(canister) = self.get_canister(canister_id) { if canisters_to_build .iter() .map(|c| c.get_name()) .contains(&canister.get_name()) + && { + use dfx_core::fs::metadata; + let wasm_file_name = format!( + "{}/{}/{}.wasm", + canister.get_info().get_output_root().display(), canister.get_name(), canister.get_name() + ); + let wasm_file_metadata = metadata(Path::new(&wasm_file_name))?; + let wasm_file_time = wasm_file_metadata.modified()?; + let need_build = false; + let mut bfs = Bfs::new(&graph, idx); + loop { + // if let Some(&node) = bfs.next(graph) { + // } else { + break false; + // } + }; + true + } { trace!(log, "Building canister '{}'.", canister.get_name()); } else { @@ -755,7 +774,7 @@ impl CanisterPool { } } - self.step_postbuild_all(build_config, &order) + self.step_postbuild_all(build_config, &order.iter().map(|e| e.1).collect::>()) .map_err(|e| DfxError::new(BuildError::PostBuildAllStepFailed(Box::new(e))))?; Ok(result) From 1a01bfd106bff6221019bb953c8ea7625363c0e2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 13 Apr 2024 22:26:22 +0300 Subject: [PATCH 002/354] small simplification --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 1a77e79d3a..b360155345 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -774,7 +774,7 @@ impl CanisterPool { } } - self.step_postbuild_all(build_config, &order.iter().map(|e| e.1).collect::>()) + self.step_postbuild_all(build_config, &order.into_iter().map(|e| e.1).collect::>()) .map_err(|e| DfxError::new(BuildError::PostBuildAllStepFailed(Box::new(e))))?; Ok(result) 
From 46b6e2064212415879730af1292400db659eb054 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 13 Apr 2024 22:31:01 +0300 Subject: [PATCH 003/354] more correct code --- src/dfx/src/lib/models/canister.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index b360155345..97a4ae88d9 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -719,13 +719,11 @@ impl CanisterPool { .contains(&canister.get_name()) && { use dfx_core::fs::metadata; - let wasm_file_name = format!( - "{}/{}/{}.wasm", - canister.get_info().get_output_root().display(), canister.get_name(), canister.get_name() - ); - let wasm_file_metadata = metadata(Path::new(&wasm_file_name))?; + let wasm_file_name = canister.get_info().get_output_root() + .join(Path::new(canister.get_name())) + .join(Path::new(&format!("{}.wasm", canister.get_name()))); + let wasm_file_metadata = metadata(wasm_file_name.as_path())?; let wasm_file_time = wasm_file_metadata.modified()?; - let need_build = false; let mut bfs = Bfs::new(&graph, idx); loop { // if let Some(&node) = bfs.next(graph) { From af940e1e47498e56fbfaea02095e55e48131a380 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 13 Apr 2024 22:31:58 +0300 Subject: [PATCH 004/354] uncommented some code --- src/dfx/src/lib/models/canister.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 97a4ae88d9..6dc4adfd3f 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -726,10 +726,10 @@ impl CanisterPool { let wasm_file_time = wasm_file_metadata.modified()?; let mut bfs = Bfs::new(&graph, idx); loop { - // if let Some(&node) = bfs.next(graph) { - // } else { + if let Some(node) = bfs.next(&graph) { + } else { break false; - // } + } }; true } From b9d00c88b2aa891825f578552e880b679b369c22 
Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 13 Apr 2024 22:42:52 +0300 Subject: [PATCH 005/354] more code towards file times based build --- src/dfx/src/lib/models/canister.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 6dc4adfd3f..eb64d72a9b 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -726,7 +726,13 @@ impl CanisterPool { let wasm_file_time = wasm_file_metadata.modified()?; let mut bfs = Bfs::new(&graph, idx); loop { - if let Some(node) = bfs.next(&graph) { + if let Some(node_index) = bfs.next(&graph) { + if let Some(node) = graph.node_weight(node_index) { + // FIXME: We need the graph of dependencies including `.mo` files, not only canisters. + // TODO + } else { + panic!("cannot get canister"); + } } else { break false; } From e5efe66901b95849e460635ad5f1a0438789dd08 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 01:17:04 +0300 Subject: [PATCH 006/354] developing --- src/dfx/src/lib/builders/motoko.rs | 56 ++++++++++++++++++++++++++++-- 1 file changed, 53 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 344d1519dc..a9e43110b3 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -9,10 +9,11 @@ use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; use crate::lib::models::canister::CanisterPool; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; -use anyhow::Context; +use anyhow::{anyhow, Context}; use candid::Principal as CanisterId; use dfx_core::config::cache::Cache; use dfx_core::config::model::dfinity::{MetadataVisibility, Profile}; +use dfx_core::fs::metadata; use fn_error_context::context; use slog::{info, o, trace, warn, Logger}; use std::collections::{BTreeMap, BTreeSet}; @@ -118,11 +119,18 @@ impl 
CanisterBuilder for MotokoBuilder { let input_path = motoko_info.get_main_path(); let output_wasm_path = motoko_info.get_output_wasm_path(); + // from name to principal: let id_map = pool .get_canister_list() .iter() .map(|c| (c.get_name().to_string(), c.canister_id().to_text())) .collect(); + // from principal to name: + let rev_id_map: BTreeMap = pool + .get_canister_list() + .iter() + .map(|c| (c.canister_id().to_text(), c.get_name().to_string())) + .collect(); std::fs::create_dir_all(motoko_info.get_output_root()).with_context(|| { format!( @@ -135,9 +143,10 @@ impl CanisterBuilder for MotokoBuilder { std::fs::create_dir_all(idl_dir_path) .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; + let imports = get_imports(cache.as_ref(), &motoko_info)?; + // If the management canister is being imported, emit the candid file. - if get_imports(cache.as_ref(), &motoko_info)? - .contains(&MotokoImport::Ic("aaaaa-aa".to_string())) + if imports.contains(&MotokoImport::Ic("aaaaa-aa".to_string())) { let management_idl_path = idl_dir_path.join("aaaaa-aa.did"); dfx_core::fs::write(management_idl_path, management_idl()?)?; @@ -146,6 +155,47 @@ impl CanisterBuilder for MotokoBuilder { let package_arguments = package_arguments::load(cache.as_ref(), motoko_info.get_packtool())?; + let wasm_file_metadata = metadata(output_wasm_path)?; + let wasm_file_time = wasm_file_metadata.modified()?; + let mut import_iter = imports.iter(); + loop { + if let Some(import) = import_iter.next() { + // FIXME: Is `build_root` correct below? 
+ match import { + MotokoImport::Canister(canisterName) => { + let wasm_file_name = config.build_root + .join(Path::new(canisterName)) + .join(Path::new(&format!("{}.wasm", canisterName))); + } + MotokoImport::Ic(canisterId) => { + if let Some(canisterName) = rev_id_map.get(canisterId) { + let wasm_file_name = config.build_root + .join(Path::new(canisterName)) + .join(Path::new(&format!("{}.wasm", canisterName))); + } + } + MotokoImport::Lib(path) => { + // FIXME: `lib` in name + if let Some(canisterName) = Path::new(path).components().last() { + let canisterName: &Path = canisterName.as_ref(); + let wasm_file_name = config.build_root + .join(Path::new(canisterName)) + .join(Path::new(&format!("{}.wasm", canisterName.to_str().unwrap()))); + } + } + MotokoImport::Relative(path) => { + let import_file_metadata = metadata(path)?; + let import_file_time = import_file_metadata.modified()?; + if import_file_time > wasm_file_time { + break; + }; + } + } + } else { + return Err(anyhow!("already compiled")); // TODO: Ensure that `dfx` command doesn't return false because of this. 
+ } + } + let moc_arguments = match motoko_info.get_args() { Some(args) => [ package_arguments, From 2abf6df3597350e34ccfea04af29c35d4d8966d2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 02:28:34 +0300 Subject: [PATCH 007/354] developing --- src/dfx/src/lib/builders/motoko.rs | 53 +++++++++++++++++++----------- 1 file changed, 33 insertions(+), 20 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index a9e43110b3..bc72431234 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -19,6 +19,7 @@ use slog::{info, o, trace, warn, Logger}; use std::collections::{BTreeMap, BTreeSet}; use std::convert::TryFrom; use std::fmt::Debug; +use std::ops::Deref; use std::path::{Path, PathBuf}; use std::process::Output; use std::sync::Arc; @@ -160,37 +161,49 @@ impl CanisterBuilder for MotokoBuilder { let mut import_iter = imports.iter(); loop { if let Some(import) = import_iter.next() { - // FIXME: Is `build_root` correct below? - match import { + let imported_file = match import { MotokoImport::Canister(canisterName) => { - let wasm_file_name = config.build_root - .join(Path::new(canisterName)) - .join(Path::new(&format!("{}.wasm", canisterName))); + if let Some(canister) = pool.get_first_canister_with_name(canisterName) { + let canister = canister.clone(); // TODO: remove? 
+ if let Some(main_file) = *canister.get_info().get_main_file() { + Some(main_file.clone()) + } else { + None + } + } else { + None + } } MotokoImport::Ic(canisterId) => { if let Some(canisterName) = rev_id_map.get(canisterId) { - let wasm_file_name = config.build_root - .join(Path::new(canisterName)) - .join(Path::new(&format!("{}.wasm", canisterName))); + if let Some(canister) = pool.get_first_canister_with_name(canisterName) { + if let Some(main_file) = canister.get_info().get_main_file() { + Some(main_file) + } else { + None + } + } else { + None + } + } else { + None } } MotokoImport::Lib(path) => { // FIXME: `lib` in name - if let Some(canisterName) = Path::new(path).components().last() { - let canisterName: &Path = canisterName.as_ref(); - let wasm_file_name = config.build_root - .join(Path::new(canisterName)) - .join(Path::new(&format!("{}.wasm", canisterName.to_str().unwrap()))); - } + Some(Path::new(path)) } MotokoImport::Relative(path) => { - let import_file_metadata = metadata(path)?; - let import_file_time = import_file_metadata.modified()?; - if import_file_time > wasm_file_time { - break; - }; + Some(Path::new(path)) } - } + }; + if let Some(imported_file) = imported_file { + let imported_file_metadata = metadata(imported_file)?; + let imported_file_time = imported_file_metadata.modified()?; + if imported_file_time > wasm_file_time { + break; + }; + }; } else { return Err(anyhow!("already compiled")); // TODO: Ensure that `dfx` command doesn't return false because of this. 
} From fa666dfa9e5980b5f8a59950656d9b0f90f7e9ee Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 02:51:53 +0300 Subject: [PATCH 008/354] succeeded to compile --- src/dfx/src/lib/builders/motoko.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index bc72431234..55d812af12 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -165,8 +165,10 @@ impl CanisterBuilder for MotokoBuilder { MotokoImport::Canister(canisterName) => { if let Some(canister) = pool.get_first_canister_with_name(canisterName) { let canister = canister.clone(); // TODO: remove? - if let Some(main_file) = *canister.get_info().get_main_file() { - Some(main_file.clone()) + let main_file = canister.get_info().get_main_file().clone(); + if let Some(main_file) = main_file.clone() { + let main_file = main_file.to_owned(); + Some(main_file) } else { None } @@ -178,7 +180,7 @@ impl CanisterBuilder for MotokoBuilder { if let Some(canisterName) = rev_id_map.get(canisterId) { if let Some(canister) = pool.get_first_canister_with_name(canisterName) { if let Some(main_file) = canister.get_info().get_main_file() { - Some(main_file) + Some(main_file.to_owned()) } else { None } @@ -191,14 +193,14 @@ impl CanisterBuilder for MotokoBuilder { } MotokoImport::Lib(path) => { // FIXME: `lib` in name - Some(Path::new(path)) + Some(Path::new(path).to_owned()) } MotokoImport::Relative(path) => { - Some(Path::new(path)) + Some(Path::new(path).to_owned()) } }; if let Some(imported_file) = imported_file { - let imported_file_metadata = metadata(imported_file)?; + let imported_file_metadata = metadata(imported_file.as_ref())?; let imported_file_time = imported_file_metadata.modified()?; if imported_file_time > wasm_file_time { break; From ec1ab546c0fa2fd7df7a363d7559d8d0f803bc2b Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 02:52:21 +0300 
Subject: [PATCH 009/354] corrected identifier casing --- src/dfx/src/lib/builders/motoko.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 55d812af12..e484360ea2 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -162,7 +162,7 @@ impl CanisterBuilder for MotokoBuilder { loop { if let Some(import) = import_iter.next() { let imported_file = match import { - MotokoImport::Canister(canisterName) => { + MotokoImport::Canister(canister_name) => { if let Some(canister) = pool.get_first_canister_with_name(canisterName) { let canister = canister.clone(); // TODO: remove? let main_file = canister.get_info().get_main_file().clone(); @@ -176,8 +176,8 @@ impl CanisterBuilder for MotokoBuilder { None } } - MotokoImport::Ic(canisterId) => { - if let Some(canisterName) = rev_id_map.get(canisterId) { + MotokoImport::Ic(canister_id) => { + if let Some(canister_name) = rev_id_map.get(canisterId) { if let Some(canister) = pool.get_first_canister_with_name(canisterName) { if let Some(main_file) = canister.get_info().get_main_file() { Some(main_file.to_owned()) From 7aea175ff6d836ba032bd4440d9eaee988a6b51f Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 02:53:04 +0300 Subject: [PATCH 010/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index e484360ea2..1d737ad44e 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -163,7 +163,7 @@ impl CanisterBuilder for MotokoBuilder { if let Some(import) = import_iter.next() { let imported_file = match import { MotokoImport::Canister(canister_name) => { - if let Some(canister) = pool.get_first_canister_with_name(canisterName) { + if let Some(canister) = pool.get_first_canister_with_name(canister_name) { 
let canister = canister.clone(); // TODO: remove? let main_file = canister.get_info().get_main_file().clone(); if let Some(main_file) = main_file.clone() { @@ -177,8 +177,8 @@ impl CanisterBuilder for MotokoBuilder { } } MotokoImport::Ic(canister_id) => { - if let Some(canister_name) = rev_id_map.get(canisterId) { - if let Some(canister) = pool.get_first_canister_with_name(canisterName) { + if let Some(canister_name) = rev_id_map.get(canister_id) { + if let Some(canister) = pool.get_first_canister_with_name(canister_name) { if let Some(main_file) = canister.get_info().get_main_file() { Some(main_file.to_owned()) } else { From 88341007b0670164d9c2aed39e9e4819064e239e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 02:54:21 +0300 Subject: [PATCH 011/354] refactor --- src/dfx/src/lib/builders/motoko.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 1d737ad44e..585932f9ac 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -164,11 +164,9 @@ impl CanisterBuilder for MotokoBuilder { let imported_file = match import { MotokoImport::Canister(canister_name) => { if let Some(canister) = pool.get_first_canister_with_name(canister_name) { - let canister = canister.clone(); // TODO: remove? 
- let main_file = canister.get_info().get_main_file().clone(); - if let Some(main_file) = main_file.clone() { - let main_file = main_file.to_owned(); - Some(main_file) + let main_file = canister.get_info().get_main_file(); + if let Some(main_file) = main_file { + Some(main_file.to_owned()) } else { None } From 1fab9d3d0e8cf4be89ae14e23f91fe79743ba5b3 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 03:01:31 +0300 Subject: [PATCH 012/354] removed unused import --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 585932f9ac..bbf7a92d42 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -19,7 +19,6 @@ use slog::{info, o, trace, warn, Logger}; use std::collections::{BTreeMap, BTreeSet}; use std::convert::TryFrom; use std::fmt::Debug; -use std::ops::Deref; use std::path::{Path, PathBuf}; use std::process::Output; use std::sync::Arc; From 47dcd0e9134f9fc2a065be03d7ba1fc69cc424b9 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 03:02:31 +0300 Subject: [PATCH 013/354] revert wrongly modified file --- src/dfx/src/lib/models/canister.rs | 31 ++++-------------------------- 1 file changed, 4 insertions(+), 27 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index eb64d72a9b..5884195458 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -21,7 +21,6 @@ use ic_wasm::metadata::{add_metadata, remove_metadata, Kind}; use ic_wasm::optimize::OptLevel; use itertools::Itertools; use petgraph::graph::{DiGraph, NodeIndex}; -use petgraph::visit::Bfs; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; @@ -703,42 +702,20 @@ impl CanisterPool { }; BuildError::DependencyError(format!("Found circular dependency: {}", message)) })?; - let order: Vec<(&NodeIndex, 
CanisterId)> = nodes + let order: Vec = nodes .iter() .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. - .map(|idx| (idx, *graph.node_weight(*idx).unwrap())) + .map(|idx| *graph.node_weight(*idx).unwrap()) .collect(); let canisters_to_build = self.canisters_to_build(build_config); let mut result = Vec::new(); - for (&idx, canister_id) in &order { + for canister_id in &order { if let Some(canister) = self.get_canister(canister_id) { if canisters_to_build .iter() .map(|c| c.get_name()) .contains(&canister.get_name()) - && { - use dfx_core::fs::metadata; - let wasm_file_name = canister.get_info().get_output_root() - .join(Path::new(canister.get_name())) - .join(Path::new(&format!("{}.wasm", canister.get_name()))); - let wasm_file_metadata = metadata(wasm_file_name.as_path())?; - let wasm_file_time = wasm_file_metadata.modified()?; - let mut bfs = Bfs::new(&graph, idx); - loop { - if let Some(node_index) = bfs.next(&graph) { - if let Some(node) = graph.node_weight(node_index) { - // FIXME: We need the graph of dependencies including `.mo` files, not only canisters. 
- // TODO - } else { - panic!("cannot get canister"); - } - } else { - break false; - } - }; - true - } { trace!(log, "Building canister '{}'.", canister.get_name()); } else { @@ -778,7 +755,7 @@ impl CanisterPool { } } - self.step_postbuild_all(build_config, &order.into_iter().map(|e| e.1).collect::>()) + self.step_postbuild_all(build_config, &order) .map_err(|e| DfxError::new(BuildError::PostBuildAllStepFailed(Box::new(e))))?; Ok(result) From 654512f10e5b1bb911408b99b10deddc5d2d01fe Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 04:41:51 +0300 Subject: [PATCH 014/354] file times based build --- src/dfx/src/lib/builders/motoko.rs | 39 +++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index bbf7a92d42..db5686d560 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -9,6 +9,7 @@ use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; use crate::lib::models::canister::CanisterPool; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; +use crate::lib::builders::bail; use anyhow::{anyhow, Context}; use candid::Principal as CanisterId; use dfx_core::config::cache::Cache; @@ -154,6 +155,18 @@ impl CanisterBuilder for MotokoBuilder { let package_arguments = package_arguments::load(cache.as_ref(), motoko_info.get_packtool())?; + let mut package_arguments_map = BTreeMap::::new(); // TODO: Can we deal without cloning strings? 
+ { // block + let mut i = 0; + while i + 3 <= package_arguments.len() { + if package_arguments[i] == "--package" { + package_arguments_map.insert(package_arguments[i+1].clone(), package_arguments[i+2].clone()); + i += 3; + } else { + i += 1; + } + }; + } let wasm_file_metadata = metadata(output_wasm_path)?; let wasm_file_time = wasm_file_metadata.modified()?; @@ -189,8 +202,28 @@ impl CanisterBuilder for MotokoBuilder { } } MotokoImport::Lib(path) => { - // FIXME: `lib` in name - Some(Path::new(path).to_owned()) + let i = path.find('/'); + let pre_path = if let Some(i) = i { + let expanded = Path::new( + package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? + ).join(Path::new("src")); + expanded.join(&path[i+1..]) + } else { + Path::new(path).to_owned() + }; + let path2 = pre_path.to_string_lossy() + ".mo"; // TODO: Is `lossy` OK? + let path2 = path2.to_string(); + let path2 = Path::new(&path2); + if path2.exists() { // TODO: Is it correct order of two variants? + Some(Path::new(path2).to_owned()) + } else { + let path3 = pre_path.join(Path::new("lib.mo")); + if path3.exists() { + Some(path3.to_owned()) + } else { + bail!("source file has been deleted"); + } + } } MotokoImport::Relative(path) => { Some(Path::new(path).to_owned()) @@ -204,7 +237,7 @@ impl CanisterBuilder for MotokoBuilder { }; }; } else { - return Err(anyhow!("already compiled")); // TODO: Ensure that `dfx` command doesn't return false because of this. + bail!("already compiled"); // FIXME: Ensure that `dfx` command doesn't return false because of this. 
} } From 4b058fc52f80988b1bbe99ce7613b39078e4ae4f Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 05:00:46 +0300 Subject: [PATCH 015/354] bug fixes --- src/dfx/src/lib/builders/motoko.rs | 115 +++++++++++++++-------------- 1 file changed, 59 insertions(+), 56 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index db5686d560..72a57c934f 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -168,78 +168,81 @@ impl CanisterBuilder for MotokoBuilder { }; } - let wasm_file_metadata = metadata(output_wasm_path)?; - let wasm_file_time = wasm_file_metadata.modified()?; - let mut import_iter = imports.iter(); - loop { - if let Some(import) = import_iter.next() { - let imported_file = match import { - MotokoImport::Canister(canister_name) => { - if let Some(canister) = pool.get_first_canister_with_name(canister_name) { - let main_file = canister.get_info().get_main_file(); - if let Some(main_file) = main_file { - Some(main_file.to_owned()) + // Check that one of the dependencies is newer than the target: + if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { + let wasm_file_time = wasm_file_metadata.modified()?; + let mut import_iter = imports.iter(); + loop { + if let Some(import) = import_iter.next() { + let imported_file = match import { + MotokoImport::Canister(canister_name) => { + if let Some(canister) = pool.get_first_canister_with_name(canister_name) { + let main_file = canister.get_info().get_main_file(); + if let Some(main_file) = main_file { + Some(main_file.to_owned()) + } else { + None + } } else { None } - } else { - None } - } - MotokoImport::Ic(canister_id) => { - if let Some(canister_name) = rev_id_map.get(canister_id) { - if let Some(canister) = pool.get_first_canister_with_name(canister_name) { - if let Some(main_file) = canister.get_info().get_main_file() { - Some(main_file.to_owned()) + MotokoImport::Ic(canister_id) => { + if let 
Some(canister_name) = rev_id_map.get(canister_id) { + if let Some(canister) = pool.get_first_canister_with_name(canister_name) { + if let Some(main_file) = canister.get_info().get_main_file() { + Some(main_file.to_owned()) + } else { + None + } } else { None } } else { None } - } else { - None } - } - MotokoImport::Lib(path) => { - let i = path.find('/'); - let pre_path = if let Some(i) = i { - let expanded = Path::new( - package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? - ).join(Path::new("src")); - expanded.join(&path[i+1..]) - } else { - Path::new(path).to_owned() - }; - let path2 = pre_path.to_string_lossy() + ".mo"; // TODO: Is `lossy` OK? - let path2 = path2.to_string(); - let path2 = Path::new(&path2); - if path2.exists() { // TODO: Is it correct order of two variants? - Some(Path::new(path2).to_owned()) - } else { - let path3 = pre_path.join(Path::new("lib.mo")); - if path3.exists() { - Some(path3.to_owned()) + MotokoImport::Lib(path) => { + let i = path.find('/'); + let pre_path = if let Some(i) = i { + let expanded = Path::new( + package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? + ); + expanded.join(&path[i+1..]) + } else { + Path::new(path).to_owned() + }; + let path2 = pre_path.to_string_lossy() + ".mo"; // TODO: Is `lossy` OK? + let path2 = path2.to_string(); + let path2 = Path::new(&path2); + if path2.exists() { // TODO: Is it correct order of two variants? 
+ Some(Path::new(path2).to_owned()) } else { - bail!("source file has been deleted"); + let path3 = pre_path.join(Path::new("lib.mo")); + println!("path3: {}", &path3.to_string_lossy()); // FIXME + if path3.exists() { + Some(path3.to_owned()) + } else { + bail!("source file has been deleted"); + } } } - } - MotokoImport::Relative(path) => { - Some(Path::new(path).to_owned()) - } - }; - if let Some(imported_file) = imported_file { - let imported_file_metadata = metadata(imported_file.as_ref())?; - let imported_file_time = imported_file_metadata.modified()?; - if imported_file_time > wasm_file_time { - break; + MotokoImport::Relative(path) => { + Some(Path::new(path).to_owned()) + } }; - }; - } else { - bail!("already compiled"); // FIXME: Ensure that `dfx` command doesn't return false because of this. + if let Some(imported_file) = imported_file { + let imported_file_metadata = metadata(imported_file.as_ref())?; + let imported_file_time = imported_file_metadata.modified()?; + if imported_file_time > wasm_file_time { + break; + }; + }; + } else { + bail!("already compiled"); // FIXME: Ensure that `dfx` command doesn't return false because of this. 
+ } } - } + }; let moc_arguments = match motoko_info.get_args() { Some(args) => [ From ae58985d58cd23eaa3f4d6f711c6c94adb6c47b9 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 05:07:49 +0300 Subject: [PATCH 016/354] removed tracing --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 72a57c934f..19b37511b7 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -219,7 +219,6 @@ impl CanisterBuilder for MotokoBuilder { Some(Path::new(path2).to_owned()) } else { let path3 = pre_path.join(Path::new("lib.mo")); - println!("path3: {}", &path3.to_string_lossy()); // FIXME if path3.exists() { Some(path3.to_owned()) } else { From 6030e49f022091145f17ef29aad03c9ba4021c44 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 05:54:45 +0300 Subject: [PATCH 017/354] comments and traces --- src/dfx/src/lib/builders/motoko.rs | 2 +- src/dfx/src/lib/models/canister.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 19b37511b7..3b57d3c395 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -93,7 +93,7 @@ impl CanisterBuilder for MotokoBuilder { info: &CanisterInfo, ) -> DfxResult> { let motoko_info = info.as_info::()?; - let imports = get_imports(self.cache.as_ref(), &motoko_info)?; + let imports = get_imports(self.cache.as_ref(), &motoko_info)?; // TODO: slow operation Ok(imports .iter() diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 5884195458..d39afdc239 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -691,6 +691,7 @@ impl CanisterPool { self.step_prebuild_all(log, build_config) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; + trace!(log, 
"Building dependencies graph."); let graph = self.build_dependencies_graph()?; let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { From 1b1dca9aaef9112a6c3e871026781d006afc36f2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 06:07:25 +0300 Subject: [PATCH 018/354] "already built" is not an error --- src/dfx/src/lib/builders/motoko.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 3b57d3c395..858526670c 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -238,7 +238,15 @@ impl CanisterBuilder for MotokoBuilder { }; }; } else { - bail!("already compiled"); // FIXME: Ensure that `dfx` command doesn't return false because of this. + // trace!(log, "Canister {} already compiled", canister_info.get_name()); // TODO + return Ok(BuildOutput { // duplicate code + canister_id: canister_info + .get_canister_id() + .expect("Could not find canister ID."), + wasm: WasmBuildOutput::File(motoko_info.get_output_wasm_path().to_path_buf()), + idl: IdlBuildOutput::File(motoko_info.get_output_idl_path().to_path_buf()), + }) + } } }; @@ -279,7 +287,7 @@ impl CanisterBuilder for MotokoBuilder { }; motoko_compile(&self.logger, cache.as_ref(), ¶ms)?; - Ok(BuildOutput { + Ok(BuildOutput { // duplicate code canister_id: canister_info .get_canister_id() .expect("Could not find canister ID."), From 77dfdb67f20348b5d06788e819f9d844a9d4b704 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 06:11:09 +0300 Subject: [PATCH 019/354] comment --- src/dfx/src/lib/builders/motoko.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 858526670c..6702d4ee22 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -144,7 
+144,7 @@ impl CanisterBuilder for MotokoBuilder { std::fs::create_dir_all(idl_dir_path) .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; - let imports = get_imports(cache.as_ref(), &motoko_info)?; + let imports = get_imports(cache.as_ref(), &motoko_info)?; // TODO: repeated slow operation // If the management canister is being imported, emit the candid file. if imports.contains(&MotokoImport::Ic("aaaaa-aa".to_string())) From 7ac8e392bd2c366f1c907fe0f637998df134667d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 06:17:41 +0300 Subject: [PATCH 020/354] CHANGELOG.md --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e7c4ec618c..edf7780640 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,11 @@ # UNRELEASED +### feat: rebuild only necessary canisters + +Don't compile canisters for which all dependencies are elder than the `.wasm` file. +This results in big compilation speedups. + ### feat: display schema for dfx metadata json `dfx schema --for dfx-metadata` to display JSON schema of the "dfx" metadata. From 1d0cbf9fa853c8b715e816b606722e66e78c0157 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 06:19:51 +0300 Subject: [PATCH 021/354] docs --- docs/cli-reference/dfx-build.mdx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/cli-reference/dfx-build.mdx b/docs/cli-reference/dfx-build.mdx index 0e58b8f43d..278792cfbb 100644 --- a/docs/cli-reference/dfx-build.mdx +++ b/docs/cli-reference/dfx-build.mdx @@ -10,6 +10,8 @@ Note that you can only run this command from within the project directory struct The `dfx build` command looks for the source code to compile using the information you have configured under the `canisters` section in the `dfx.json` configuration file. 
+For compilation speed reasons, `dfx build` (and `dfx deploy`) don't recompile canisters, all dependencies of which are elder than the existing WebAssembly (from the previous compilation). + ## Basic usage ``` bash From 68484b11803d6453b7fa0f9e3051178f971a9638 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 14:48:49 +0300 Subject: [PATCH 022/354] trying to get only needed dependencies --- src/dfx/src/commands/deps/pull.rs | 1 + src/dfx/src/lib/builders/motoko.rs | 5 +++++ src/dfx/src/lib/models/canister.rs | 19 +++++++++++++++---- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/src/dfx/src/commands/deps/pull.rs b/src/dfx/src/commands/deps/pull.rs index 3da9b7d9d8..6719f5d47c 100644 --- a/src/dfx/src/commands/deps/pull.rs +++ b/src/dfx/src/commands/deps/pull.rs @@ -82,6 +82,7 @@ async fn resolve_all_dependencies( ) -> DfxResult> { let mut canisters_to_resolve: VecDeque = pull_canisters_in_config.values().cloned().collect(); + println!("canisters_to_resolve: {:?}", canisters_to_resolve); // FIXME: Remove. let mut checked = BTreeSet::new(); while let Some(canister_id) = canisters_to_resolve.pop_front() { if !checked.contains(&canister_id) { diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 6702d4ee22..0d896a31ba 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -79,6 +79,10 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo) -> DfxResult DfxResult> { let motoko_info = info.as_info::()?; + println!("get_dependencies: {}", info.get_name()); // FIXME: Remove. 
let imports = get_imports(self.cache.as_ref(), &motoko_info)?; // TODO: slow operation Ok(imports diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index d39afdc239..7127b689d2 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -24,7 +24,7 @@ use petgraph::graph::{DiGraph, NodeIndex}; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; -use std::collections::{BTreeMap, HashSet}; +use std::collections::{BTreeMap, HashMap, HashSet}; use std::convert::TryFrom; use std::ffi::OsStr; use std::io::Read; @@ -528,10 +528,21 @@ impl CanisterPool { } #[context("Failed to build dependencies graph for canister pool.")] - fn build_dependencies_graph(&self) -> DfxResult> { + fn build_dependencies_graph(&self, canisters_to_build: Option>) -> DfxResult> { + println!("build_dependencies_graph"); // FIXME: Remove. let mut graph: DiGraph = DiGraph::new(); let mut id_set: BTreeMap> = BTreeMap::new(); + // TODO: Can be done faster by not using `collect` and/or `clone`? + let real_canisters_to_build = if let Some(canisters_to_build) = canisters_to_build { + let canisters_to_build_map: HashMap<&str, ()> = canisters_to_build.iter().map(|e| (e.as_str(), ())).collect(); + self.canisters.iter() + .filter(|c| canisters_to_build_map.contains_key(c.get_name())) + .map(|c| c.clone()).collect::>() + } else { + self.canisters.iter().map(|c| c.clone()).collect::>() + }; + // Add all the canisters as nodes. for canister in &self.canisters { let canister_id = canister.info.get_canister_id()?; @@ -539,7 +550,7 @@ impl CanisterPool { } // Add all the edges. 
- for canister in &self.canisters { + for canister in &real_canisters_to_build { let canister_id = canister.canister_id(); let canister_info = &canister.info; let deps = canister.builder.get_dependencies(self, canister_info)?; @@ -692,7 +703,7 @@ impl CanisterPool { .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; trace!(log, "Building dependencies graph."); - let graph = self.build_dependencies_graph()?; + let graph = self.build_dependencies_graph(build_config.canisters_to_build.clone())?; // TODO: Can `clone` be eliminated? let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { Some(canister_id) => match self.get_canister_info(canister_id) { From 2b2aa23c32a5ef30e60ea6f28b7bd7dd9c438dad Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 15:24:59 +0300 Subject: [PATCH 023/354] buggy code --- src/dfx/src/lib/models/canister.rs | 31 +++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 7127b689d2..f2c4d0907a 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -549,18 +549,31 @@ impl CanisterPool { id_set.insert(canister_id, graph.add_node(canister_id)); } - // Add all the edges. - for canister in &real_canisters_to_build { - let canister_id = canister.canister_id(); - let canister_info = &canister.info; - let deps = canister.builder.get_dependencies(self, canister_info)?; - if let Some(node_ix) = id_set.get(&canister_id) { - for d in deps { - if let Some(dep_ix) = id_set.get(&d) { - graph.add_edge(*node_ix, *dep_ix, ()); + // Traverse the graph of dependencies starting from `real_canisters_to_build` set. 
+ let mut current_canisters_to_build = + HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); + loop { + let mut current_canisters_to_build2 = HashMap::new(); + for canister in &self.canisters { + if !current_canisters_to_build.contains_key(&canister.canister_id()) { + break; + } + let canister_id = canister.canister_id(); + let canister_info = &canister.info; + let deps = canister.builder.get_dependencies(self, canister_info)?; + if let Some(node_ix) = id_set.get(&canister_id) { + for d in deps { + if let Some(dep_ix) = id_set.get(&d) { + graph.add_edge(*node_ix, *dep_ix, ()); + current_canisters_to_build2.insert(*graph.node_weight(*dep_ix).unwrap(), ()); + } } } } + if current_canisters_to_build2.is_empty() { // passed to the end of the graph + break; + } + current_canisters_to_build = current_canisters_to_build2; } // Verify the graph has no cycles. From 368e687d6b74d7b8e77b51ad7c473d27f6969e58 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 15:29:40 +0300 Subject: [PATCH 024/354] FIXME comment --- src/dfx/src/lib/models/canister.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index f2c4d0907a..5bebd4b23e 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -550,6 +550,7 @@ impl CanisterPool { } // Traverse the graph of dependencies starting from `real_canisters_to_build` set. + // FIXME: This hangs if circular dependencies (see below). 
let mut current_canisters_to_build = HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); loop { From c2ea0c4874f9ffb7b5ed7fff8f02093e67db96a7 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 15:41:18 +0300 Subject: [PATCH 025/354] misc --- src/dfx/src/lib/models/canister.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 5bebd4b23e..659b74b4a0 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -21,6 +21,7 @@ use ic_wasm::metadata::{add_metadata, remove_metadata, Kind}; use ic_wasm::optimize::OptLevel; use itertools::Itertools; use petgraph::graph::{DiGraph, NodeIndex}; +use petgraph::visit::Bfs; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; @@ -527,6 +528,7 @@ impl CanisterPool { &self.logger } + /// Build only dependencies relevant for `canisters_to_build`. #[context("Failed to build dependencies graph for canister pool.")] fn build_dependencies_graph(&self, canisters_to_build: Option>) -> DfxResult> { println!("build_dependencies_graph"); // FIXME: Remove. 
@@ -734,6 +736,7 @@ impl CanisterPool { .map(|idx| *graph.node_weight(*idx).unwrap()) .collect(); + // let canisters_to_build = Bfs::new(graph, start); let canisters_to_build = self.canisters_to_build(build_config); let mut result = Vec::new(); for canister_id in &order { From 1fe1a1b3a379e85f70cd620f7c42fc0357de332e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 16:02:19 +0300 Subject: [PATCH 026/354] error checking --- src/dfx/src/lib/models/canister.rs | 33 +++++++++++++++--------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 659b74b4a0..4e7244c091 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -551,8 +551,22 @@ impl CanisterPool { id_set.insert(canister_id, graph.add_node(canister_id)); } + // Verify the graph has no cycles. + if let Err(err) = petgraph::algo::toposort(&graph, None) { + let message = match graph.node_weight(err.node_id()) { + Some(canister_id) => match self.get_canister_info(canister_id) { + Some(info) => info.get_name().to_string(), + None => format!("<{}>", canister_id.to_text()), + }, + None => "".to_string(), + }; + return Err(DfxError::new(BuildError::DependencyError(format!( + "Found circular dependency: {}", + message + )))) + } + // Traverse the graph of dependencies starting from `real_canisters_to_build` set. - // FIXME: This hangs if circular dependencies (see below). let mut current_canisters_to_build = HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); loop { @@ -579,22 +593,7 @@ impl CanisterPool { current_canisters_to_build = current_canisters_to_build2; } - // Verify the graph has no cycles. 
- if let Err(err) = petgraph::algo::toposort(&graph, None) { - let message = match graph.node_weight(err.node_id()) { - Some(canister_id) => match self.get_canister_info(canister_id) { - Some(info) => info.get_name().to_string(), - None => format!("<{}>", canister_id.to_text()), - }, - None => "".to_string(), - }; - Err(DfxError::new(BuildError::DependencyError(format!( - "Found circular dependency: {}", - message - )))) - } else { - Ok(graph) - } + Ok(graph) } #[context("Failed step_prebuild_all.")] From 7744db5f1e9a0b68416150faafd28426b89fd510 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 16:06:39 +0300 Subject: [PATCH 027/354] read only these --print-deps dependencies that are necessary --- CHANGELOG.md | 2 ++ src/dfx/src/commands/deps/pull.rs | 1 - src/dfx/src/lib/builders/motoko.rs | 5 ----- src/dfx/src/lib/models/canister.rs | 2 -- 4 files changed, 2 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index edf7780640..6b596d499f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,8 @@ ### feat: rebuild only necessary canisters +Read only those `--print-deps` dependencies that are necessary to read. + Don't compile canisters for which all dependencies are elder than the `.wasm` file. This results in big compilation speedups. diff --git a/src/dfx/src/commands/deps/pull.rs b/src/dfx/src/commands/deps/pull.rs index 6719f5d47c..3da9b7d9d8 100644 --- a/src/dfx/src/commands/deps/pull.rs +++ b/src/dfx/src/commands/deps/pull.rs @@ -82,7 +82,6 @@ async fn resolve_all_dependencies( ) -> DfxResult> { let mut canisters_to_resolve: VecDeque = pull_canisters_in_config.values().cloned().collect(); - println!("canisters_to_resolve: {:?}", canisters_to_resolve); // FIXME: Remove. 
let mut checked = BTreeSet::new(); while let Some(canister_id) = canisters_to_resolve.pop_front() { if !checked.contains(&canister_id) { diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 0d896a31ba..6702d4ee22 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -79,10 +79,6 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo) -> DfxResult DfxResult> { let motoko_info = info.as_info::()?; - println!("get_dependencies: {}", info.get_name()); // FIXME: Remove. let imports = get_imports(self.cache.as_ref(), &motoko_info)?; // TODO: slow operation Ok(imports diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 4e7244c091..500d9833ce 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -21,7 +21,6 @@ use ic_wasm::metadata::{add_metadata, remove_metadata, Kind}; use ic_wasm::optimize::OptLevel; use itertools::Itertools; use petgraph::graph::{DiGraph, NodeIndex}; -use petgraph::visit::Bfs; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; @@ -531,7 +530,6 @@ impl CanisterPool { /// Build only dependencies relevant for `canisters_to_build`. #[context("Failed to build dependencies graph for canister pool.")] fn build_dependencies_graph(&self, canisters_to_build: Option>) -> DfxResult> { - println!("build_dependencies_graph"); // FIXME: Remove. 
let mut graph: DiGraph = DiGraph::new(); let mut id_set: BTreeMap> = BTreeMap::new(); From 7dff290fc996e02b9f2e0a5fca95a57cc98e8141 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 17:27:56 +0300 Subject: [PATCH 028/354] speedup attempt --- src/dfx/src/lib/builders/motoko.rs | 65 ++++++++++++++---------------- src/dfx/src/lib/models/canister.rs | 28 ++++++++++++- 2 files changed, 57 insertions(+), 36 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 6702d4ee22..fba380b558 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -6,7 +6,7 @@ use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; -use crate::lib::models::canister::CanisterPool; +use crate::lib::models::canister::{CanisterPool, ImportsTracker, MotokoImport}; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; use crate::lib::builders::bail; @@ -17,9 +17,8 @@ use dfx_core::config::model::dfinity::{MetadataVisibility, Profile}; use dfx_core::fs::metadata; use fn_error_context::context; use slog::{info, o, trace, warn, Logger}; -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeMap; use std::convert::TryFrom; -use std::fmt::Debug; use std::path::{Path, PathBuf}; use std::process::Output; use std::sync::Arc; @@ -43,20 +42,19 @@ impl MotokoBuilder { } } +// TODO: Rename this function. 
#[context("Failed to find imports for canister at '{}'.", info.get_main_path().display())] -fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo) -> DfxResult> { - #[context("Failed recursive dependency detection at {}.", file.display())] - fn get_imports_recursive( +fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut ImportsTracker) -> DfxResult<()> { + #[context("Failed recursive dependency detection at {}.", file.display())] // FIXME + fn get_imports_recursive ( cache: &dyn Cache, file: &Path, - result: &mut BTreeSet, + imports: &mut ImportsTracker, ) -> DfxResult { - if result.contains(&MotokoImport::Relative(file.to_path_buf())) { + if imports.nodes.contains_key(&MotokoImport::Relative(file.to_path_buf())) { return Ok(()); } - result.insert(MotokoImport::Relative(file.to_path_buf())); - let mut command = cache.get_binary_command("moc")?; let command = command.arg("--print-deps").arg(file); let output = command @@ -68,10 +66,15 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo) -> DfxResult { - get_imports_recursive(cache, path.as_path(), result)?; + get_imports_recursive(cache, path.as_path(), imports)?; } _ => { - result.insert(import); + let parent = MotokoImport::Relative(file.to_path_buf()); + // imports.insert(parent); + + let parent_node = imports.graph.add_node(parent); + let child_node = imports.graph.add_node(import); + imports.graph.add_edge(parent_node, child_node, ()); } } } @@ -79,10 +82,9 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo) -> DfxResult DfxResult> { let motoko_info = info.as_info::()?; - let imports = get_imports(self.cache.as_ref(), &motoko_info)?; // TODO: slow operation + get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut())?; // TODO: slow operation - Ok(imports + Ok(pool.imports.borrow_mut().nodes .iter() .filter_map(|import| { - if let MotokoImport::Canister(name) = import { - pool.get_first_canister_with_name(name) + if let 
MotokoImport::Canister(name) = import.0 { + pool.get_first_canister_with_name(name.as_str()) } else { None } @@ -144,10 +146,10 @@ impl CanisterBuilder for MotokoBuilder { std::fs::create_dir_all(idl_dir_path) .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; - let imports = get_imports(cache.as_ref(), &motoko_info)?; // TODO: repeated slow operation + get_imports(cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut())?; // TODO: repeated slow operation // If the management canister is being imported, emit the candid file. - if imports.contains(&MotokoImport::Ic("aaaaa-aa".to_string())) + if pool.imports.borrow().nodes.contains_key(&MotokoImport::Ic("aaaaa-aa".to_string())) { let management_idl_path = idl_dir_path.join("aaaaa-aa.did"); dfx_core::fs::write(management_idl_path, management_idl()?)?; @@ -171,12 +173,13 @@ impl CanisterBuilder for MotokoBuilder { // Check that one of the dependencies is newer than the target: if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let wasm_file_time = wasm_file_metadata.modified()?; - let mut import_iter = imports.iter(); + let imports = pool.imports.borrow(); + let mut import_iter = imports.nodes.iter(); loop { if let Some(import) = import_iter.next() { - let imported_file = match import { + let imported_file = match import.0 { MotokoImport::Canister(canister_name) => { - if let Some(canister) = pool.get_first_canister_with_name(canister_name) { + if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { Some(main_file.to_owned()) @@ -188,7 +191,7 @@ impl CanisterBuilder for MotokoBuilder { } } MotokoImport::Ic(canister_id) => { - if let Some(canister_name) = rev_id_map.get(canister_id) { + if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { if let Some(canister) = pool.get_first_canister_with_name(canister_name) { if let Some(main_file) = 
canister.get_info().get_main_file() { Some(main_file.to_owned()) @@ -210,7 +213,7 @@ impl CanisterBuilder for MotokoBuilder { ); expanded.join(&path[i+1..]) } else { - Path::new(path).to_owned() + Path::new(path.as_str()).to_owned() }; let path2 = pre_path.to_string_lossy() + ".mo"; // TODO: Is `lossy` OK? let path2 = path2.to_string(); @@ -227,7 +230,7 @@ impl CanisterBuilder for MotokoBuilder { } } MotokoImport::Relative(path) => { - Some(Path::new(path).to_owned()) + Some(Path::new(&path).to_owned()) } }; if let Some(imported_file) = imported_file { @@ -364,14 +367,6 @@ fn motoko_compile(logger: &Logger, cache: &dyn Cache, params: &MotokoParams<'_>) Ok(()) } -#[derive(Debug, PartialOrd, Ord, PartialEq, Eq)] -enum MotokoImport { - Canister(String), - Ic(String), - Lib(String), - Relative(PathBuf), -} - impl TryFrom<&str> for MotokoImport { type Error = DfxError; diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 500d9833ce..b6121fe349 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -28,7 +28,7 @@ use std::collections::{BTreeMap, HashMap, HashSet}; use std::convert::TryFrom; use std::ffi::OsStr; use std::io::Read; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::sync::Arc; @@ -435,10 +435,35 @@ fn check_valid_subtype(compiled_idl_path: &Path, specified_idl_path: &Path) -> D Ok(()) } +/// TODO: Motoko-specific code not here +#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash)] +pub enum MotokoImport { + Canister(String), + Ic(String), + Lib(String), + Relative(PathBuf), +} + +/// The graph of Motoko imports (TODO: Motoko-specific code not here) +pub struct ImportsTracker { + pub nodes: HashMap, + pub graph: DiGraph, +} + +impl ImportsTracker { + pub fn new() -> Self { + Self { + nodes: HashMap::new(), + graph: DiGraph::new(), + } + } +} + /// A canister pool is a list of canisters. 
pub struct CanisterPool { canisters: Vec>, logger: Logger, + pub imports: RefCell, // TODO: `pub` is a bad habit. } struct PoolConstructHelper<'a> { @@ -493,6 +518,7 @@ impl CanisterPool { Ok(CanisterPool { canisters: canisters_map, logger, + imports: RefCell::new(ImportsTracker::new()), }) } From 851cbcaeae4a1ac1ae9e53629239da7b4212bc61 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 17:30:31 +0300 Subject: [PATCH 029/354] missing code to speed up --- src/dfx/src/lib/builders/motoko.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index fba380b558..924d945de3 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -66,7 +66,9 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor let import = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; match import { MotokoImport::Relative(path) => { - get_imports_recursive(cache, path.as_path(), imports)?; + if !imports.nodes.contains_key(&import) { // Don't look up already looked up dependencies + get_imports_recursive(cache, path.as_path(), imports)?; + } } _ => { let parent = MotokoImport::Relative(file.to_path_buf()); From 09e4763d613436b1d497b1f0e16e302946acc0c9 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 17:39:08 +0300 Subject: [PATCH 030/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 924d945de3..e3145b5837 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -64,7 +64,7 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor for line in output.lines() { let import = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; - match import { + match &import { 
MotokoImport::Relative(path) => { if !imports.nodes.contains_key(&import) { // Don't look up already looked up dependencies get_imports_recursive(cache, path.as_path(), imports)?; From 2b6878ba3423331201a793a20533811094ba3018 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 17:46:44 +0300 Subject: [PATCH 031/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index e3145b5837..aefb548cd5 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -67,6 +67,7 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor match &import { MotokoImport::Relative(path) => { if !imports.nodes.contains_key(&import) { // Don't look up already looked up dependencies + imports.nodes.insert(&import, ()); get_imports_recursive(cache, path.as_path(), imports)?; } } @@ -99,7 +100,7 @@ impl CanisterBuilder for MotokoBuilder { let motoko_info = info.as_info::()?; get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut())?; // TODO: slow operation - Ok(pool.imports.borrow_mut().nodes + Ok(pool.imports.borrow().nodes .iter() .filter_map(|import| { if let MotokoImport::Canister(name) = import.0 { From 8078ced8cbbdfc6679b582a21e78d406b9f83592 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 18:06:53 +0300 Subject: [PATCH 032/354] misc --- src/dfx/src/lib/builders/motoko.rs | 26 +++++++++++--------------- src/dfx/src/lib/models/canister.rs | 2 +- 2 files changed, 12 insertions(+), 16 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index aefb548cd5..35b9052e4b 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -51,9 +51,11 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor file: &Path, imports: &mut ImportsTracker, ) 
-> DfxResult { - if imports.nodes.contains_key(&MotokoImport::Relative(file.to_path_buf())) { + let parent = MotokoImport::Relative(file.to_path_buf()); + if imports.nodes.contains_key(&parent) { return Ok(()); } + imports.nodes.insert(parent.clone(), ()); let mut command = cache.get_binary_command("moc")?; let command = command.arg("--print-deps").arg(file); @@ -63,23 +65,17 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor let output = String::from_utf8_lossy(&output.stdout); for line in output.lines() { - let import = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; - match &import { + let child = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; + // TODO: The code seems screwed: Why recompile onluy on `Relative`? + match &child { MotokoImport::Relative(path) => { - if !imports.nodes.contains_key(&import) { // Don't look up already looked up dependencies - imports.nodes.insert(&import, ()); - get_imports_recursive(cache, path.as_path(), imports)?; - } - } - _ => { - let parent = MotokoImport::Relative(file.to_path_buf()); - // imports.insert(parent); - - let parent_node = imports.graph.add_node(parent); - let child_node = imports.graph.add_node(import); - imports.graph.add_edge(parent_node, child_node, ()); + get_imports_recursive(cache, path.as_path(), imports)?; } + _ => {} } + let parent_node = imports.graph.add_node(parent.clone()); + let child_node = imports.graph.add_node(child); + imports.graph.add_edge(parent_node, child_node, ()); } Ok(()) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index b6121fe349..4e3d91dce3 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -436,7 +436,7 @@ fn check_valid_subtype(compiled_idl_path: &Path, specified_idl_path: &Path) -> D } /// TODO: Motoko-specific code not here -#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash)] +#[derive(Clone, Debug, PartialOrd, 
Ord, PartialEq, Eq, Hash)] pub enum MotokoImport { Canister(String), Ic(String), From fceda1d60cc9a416797aca7c446c0c8eb41f0484 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 18:55:28 +0300 Subject: [PATCH 033/354] misc --- src/dfx/src/lib/builders/motoko.rs | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 35b9052e4b..885b0c23eb 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -16,6 +16,7 @@ use dfx_core::config::cache::Cache; use dfx_core::config::model::dfinity::{MetadataVisibility, Profile}; use dfx_core::fs::metadata; use fn_error_context::context; +use petgraph::visit::Bfs; use slog::{info, o, trace, warn, Logger}; use std::collections::BTreeMap; use std::convert::TryFrom; @@ -45,7 +46,7 @@ impl MotokoBuilder { // TODO: Rename this function. #[context("Failed to find imports for canister at '{}'.", info.get_main_path().display())] fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut ImportsTracker) -> DfxResult<()> { - #[context("Failed recursive dependency detection at {}.", file.display())] // FIXME + #[context("Failed recursive dependency detection at {}.", file.display())] fn get_imports_recursive ( cache: &dyn Cache, file: &Path, @@ -94,7 +95,7 @@ impl CanisterBuilder for MotokoBuilder { info: &CanisterInfo, ) -> DfxResult> { let motoko_info = info.as_info::()?; - get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut())?; // TODO: slow operation + get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut())?; Ok(pool.imports.borrow().nodes .iter() @@ -145,7 +146,7 @@ impl CanisterBuilder for MotokoBuilder { std::fs::create_dir_all(idl_dir_path) .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; - get_imports(cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut())?; // TODO: 
repeated slow operation + get_imports(cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut())?; // If the management canister is being imported, emit the candid file. if pool.imports.borrow().nodes.contains_key(&MotokoImport::Ic("aaaaa-aa".to_string())) @@ -172,11 +173,12 @@ impl CanisterBuilder for MotokoBuilder { // Check that one of the dependencies is newer than the target: if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let wasm_file_time = wasm_file_metadata.modified()?; - let imports = pool.imports.borrow(); - let mut import_iter = imports.nodes.iter(); + let mut imports = pool.imports.borrow_mut(); + let start = imports.graph.add_node(MotokoImport::Relative(motoko_info.get_main_path().to_path_buf())); // Start with oput canister. + let mut import_iter = Bfs::new(&imports.graph, start); loop { - if let Some(import) = import_iter.next() { - let imported_file = match import.0 { + if let Some(import) = import_iter.next(&imports.graph) { + let imported_file = match &imports.graph[import] { MotokoImport::Canister(canister_name) => { if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); @@ -214,7 +216,7 @@ impl CanisterBuilder for MotokoBuilder { } else { Path::new(path.as_str()).to_owned() }; - let path2 = pre_path.to_string_lossy() + ".mo"; // TODO: Is `lossy` OK? + let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; let path2 = path2.to_string(); let path2 = Path::new(&path2); if path2.exists() { // TODO: Is it correct order of two variants? 
@@ -240,7 +242,7 @@ impl CanisterBuilder for MotokoBuilder { }; }; } else { - // trace!(log, "Canister {} already compiled", canister_info.get_name()); // TODO + // println!("Canister {} already compiled.", canister_info.get_name()); // TODO return Ok(BuildOutput { // duplicate code canister_id: canister_info .get_canister_id() From 4ce0f667146c73b4b0f476e00d526255cc363521 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 19:08:58 +0300 Subject: [PATCH 034/354] misc --- CHANGELOG.md | 2 ++ src/dfx/src/lib/builders/motoko.rs | 22 +++++++++++++++------- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6b596d499f..c624585660 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,8 @@ ### feat: rebuild only necessary canisters +Cache `get_imports()` results. + Read only those `--print-deps` dependencies that are necessary to read. Don't compile canisters for which all dependencies are elder than the `.wasm` file. diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 885b0c23eb..54feada155 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -45,12 +45,13 @@ impl MotokoBuilder { // TODO: Rename this function. 
#[context("Failed to find imports for canister at '{}'.", info.get_main_path().display())] -fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut ImportsTracker) -> DfxResult<()> { +fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { #[context("Failed recursive dependency detection at {}.", file.display())] fn get_imports_recursive ( cache: &dyn Cache, file: &Path, imports: &mut ImportsTracker, + pool: &CanisterPool, ) -> DfxResult { let parent = MotokoImport::Relative(file.to_path_buf()); if imports.nodes.contains_key(&parent) { @@ -67,10 +68,17 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor for line in output.lines() { let child = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; - // TODO: The code seems screwed: Why recompile onluy on `Relative`? match &child { MotokoImport::Relative(path) => { - get_imports_recursive(cache, path.as_path(), imports)?; + get_imports_recursive(cache, path.as_path(), imports, pool)?; + } + MotokoImport::Canister(canister_name) => { // duplicate code + if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { + let main_file = canister.get_info().get_main_file(); + if let Some(main_file) = main_file { + get_imports_recursive(cache, Path::new(main_file), imports, pool)?; + } + } } _ => {} } @@ -82,7 +90,7 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor Ok(()) } - get_imports_recursive(cache, info.get_main_path(), imports)?; + get_imports_recursive(cache, info.get_main_path(), imports, pool)?; Ok(()) } @@ -95,7 +103,7 @@ impl CanisterBuilder for MotokoBuilder { info: &CanisterInfo, ) -> DfxResult> { let motoko_info = info.as_info::()?; - get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut())?; + get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut(), pool)?; 
Ok(pool.imports.borrow().nodes .iter() @@ -146,7 +154,7 @@ impl CanisterBuilder for MotokoBuilder { std::fs::create_dir_all(idl_dir_path) .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; - get_imports(cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut())?; + get_imports(cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut(), pool)?; // If the management canister is being imported, emit the candid file. if pool.imports.borrow().nodes.contains_key(&MotokoImport::Ic("aaaaa-aa".to_string())) @@ -179,7 +187,7 @@ impl CanisterBuilder for MotokoBuilder { loop { if let Some(import) = import_iter.next(&imports.graph) { let imported_file = match &imports.graph[import] { - MotokoImport::Canister(canister_name) => { + MotokoImport::Canister(canister_name) => { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { From cee904b612698e2ee5f737ef9b7cecb9f7b40f97 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 19:27:56 +0300 Subject: [PATCH 035/354] misc --- src/dfx/src/lib/builders/motoko.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 54feada155..6613bfb403 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -58,6 +58,9 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor return Ok(()); } imports.nodes.insert(parent.clone(), ()); + if let MotokoImport::Relative(path) = &parent { + println!("INSERTED: {}", path.display()); // FIXME + } let mut command = cache.get_binary_command("moc")?; let command = command.arg("--print-deps").arg(file); @@ -244,6 +247,7 @@ impl CanisterBuilder for MotokoBuilder { }; if let Some(imported_file) = imported_file { let imported_file_metadata = metadata(imported_file.as_ref())?; + 
println!("IMPORTED {}", imported_file.display()); // FIXME: Remove. let imported_file_time = imported_file_metadata.modified()?; if imported_file_time > wasm_file_time { break; From 891496d588ed47f7fec1f65ef348cc2f937de236 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 20:00:25 +0300 Subject: [PATCH 036/354] misc --- src/dfx/src/lib/builders/motoko.rs | 10 +++++++++- src/dfx/src/lib/canister_info/motoko.rs | 6 +++--- src/dfx/src/lib/models/canister.rs | 1 + 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 6613bfb403..09fbb6c941 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -53,6 +53,7 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor imports: &mut ImportsTracker, pool: &CanisterPool, ) -> DfxResult { + println!("CanisterInfo: {:#?}", file); let parent = MotokoImport::Relative(file.to_path_buf()); if imports.nodes.contains_key(&parent) { return Ok(()); @@ -86,13 +87,17 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor _ => {} } let parent_node = imports.graph.add_node(parent.clone()); - let child_node = imports.graph.add_node(child); + if let MotokoImport::Relative(child) = &child { + println!("INSERTED CHILD: {}", child.display()); // FIXME + } + let child_node = imports.graph.add_node(child); imports.graph.add_edge(parent_node, child_node, ()); } Ok(()) } + println!("CanisterInfo2: {:#?}", info.get_main_path()); get_imports_recursive(cache, info.get_main_path(), imports, pool)?; Ok(()) @@ -106,7 +111,9 @@ impl CanisterBuilder for MotokoBuilder { info: &CanisterInfo, ) -> DfxResult> { let motoko_info = info.as_info::()?; + println!("CanisterInfo3: {:#?}", info.get_main_file()); // FIXME get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut(), pool)?; + println!("CanisterInfo4: {:#?}", motoko_info.get_main_path()); // 
FIXME Ok(pool.imports.borrow().nodes .iter() @@ -129,6 +136,7 @@ impl CanisterBuilder for MotokoBuilder { config: &BuildConfig, ) -> DfxResult { let motoko_info = canister_info.as_info::()?; + println!("motoko_info: {}", motoko_info.get_main_path().display()); let profile = config.profile; let input_path = motoko_info.get_main_path(); let output_wasm_path = motoko_info.get_output_wasm_path(); diff --git a/src/dfx/src/lib/canister_info/motoko.rs b/src/dfx/src/lib/canister_info/motoko.rs index f187a8765d..28f3949dbb 100644 --- a/src/dfx/src/lib/canister_info/motoko.rs +++ b/src/dfx/src/lib/canister_info/motoko.rs @@ -54,7 +54,7 @@ impl MotokoCanisterInfo { impl CanisterInfoFactory for MotokoCanisterInfo { fn create(info: &CanisterInfo) -> DfxResult { - let workspace_root = info.get_workspace_root(); + // let workspace_root = info.get_workspace_root(); // FIXME: Is it correct that I commented it out? // I commented it out to have consistent relative paths. let name = info.get_name(); ensure!( matches!(info.type_specific, CanisterTypeProperties::Motoko { .. 
}), @@ -64,11 +64,11 @@ impl CanisterInfoFactory for MotokoCanisterInfo { let main_path = info .get_main_file() .context("`main` attribute is required on Motoko canisters in dfx.json")?; - let input_path = workspace_root.join(main_path); + let input_path = main_path.to_path_buf(); // workspace_root.join(main_path); let output_root = info.get_output_root().to_path_buf(); let output_wasm_path = output_root.join(name).with_extension("wasm"); let output_idl_path = if let Some(remote_candid) = info.get_remote_candid_if_remote() { - workspace_root.join(remote_candid) + remote_candid // workspace_root.join(remote_candid) } else { output_wasm_path.with_extension("did") }; diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 4e3d91dce3..b35df0057c 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -483,6 +483,7 @@ impl CanisterPool { _ => None, }; let info = CanisterInfo::load(pool_helper.config, canister_name, canister_id)?; + // println!("CanisterInfo: {:#?}", info); let builder = pool_helper.builder_pool.get(&info); pool_helper .canisters_map From ab718511b5d928021ecccb34a2af887af20502bd Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 20:36:43 +0300 Subject: [PATCH 037/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 34 ++++++++++++++++++++---------- src/dfx/src/lib/models/canister.rs | 3 +-- 2 files changed, 24 insertions(+), 13 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 09fbb6c941..52041ac787 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -53,12 +53,12 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor imports: &mut ImportsTracker, pool: &CanisterPool, ) -> DfxResult { - println!("CanisterInfo: {:#?}", file); let parent = MotokoImport::Relative(file.to_path_buf()); if imports.nodes.contains_key(&parent) { return Ok(()); } - 
imports.nodes.insert(parent.clone(), ()); + let parent_node_index = *imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); + imports.nodes.insert(parent.clone(), parent_node_index); if let MotokoImport::Relative(path) = &parent { println!("INSERTED: {}", path.display()); // FIXME } @@ -86,18 +86,21 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor } _ => {} } - let parent_node = imports.graph.add_node(parent.clone()); - if let MotokoImport::Relative(child) = &child { - println!("INSERTED CHILD: {}", child.display()); // FIXME - } - let child_node = imports.graph.add_node(child); - imports.graph.add_edge(parent_node, child_node, ()); + let parent_node_index = *imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); + let child_node_index = *imports.nodes.entry(child.clone()).or_insert_with(|| imports.graph.add_node(child.clone())); + // if let MotokoImport::Relative(parent) = &parent { + // println!("INSERTED PARENT: {} ({:?})", parent.display(), parent_node); // FIXME + // } + // let child_node = imports.graph.add_node(child.clone()); + // if let MotokoImport::Relative(child) = &child { + // println!("INSERTED CHILD: {} ({:?})", child.display(), child_node); // FIXME + // } + imports.graph.add_edge(parent_node_index, child_node_index, ()); } Ok(()) } - println!("CanisterInfo2: {:#?}", info.get_main_path()); get_imports_recursive(cache, info.get_main_path(), imports, pool)?; Ok(()) @@ -136,7 +139,6 @@ impl CanisterBuilder for MotokoBuilder { config: &BuildConfig, ) -> DfxResult { let motoko_info = canister_info.as_info::()?; - println!("motoko_info: {}", motoko_info.get_main_path().display()); let profile = config.profile; let input_path = motoko_info.get_main_path(); let output_wasm_path = motoko_info.get_output_wasm_path(); @@ -193,10 +195,20 @@ impl CanisterBuilder for MotokoBuilder { if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let 
wasm_file_time = wasm_file_metadata.modified()?; let mut imports = pool.imports.borrow_mut(); - let start = imports.graph.add_node(MotokoImport::Relative(motoko_info.get_main_path().to_path_buf())); // Start with oput canister. + println!("START: {}", motoko_info.get_main_path().to_path_buf().display()); // FIXME + // let start = imports.graph.add_node(MotokoImport::Relative(motoko_info.get_main_path().to_path_buf())); // Start with oput canister. + let start = if let Some(node_index) = imports.nodes.get(&MotokoImport::Relative(motoko_info.get_main_path().to_path_buf())) { + *node_index + } else { + let node = MotokoImport::Relative(motoko_info.get_main_path().to_path_buf()); + let node_index = imports.graph.add_node(node.clone()); + imports.nodes.insert(node, node_index); + node_index + }; let mut import_iter = Bfs::new(&imports.graph, start); loop { if let Some(import) = import_iter.next(&imports.graph) { + println!("NodeIndex {:?}", import); let imported_file = match &imports.graph[import] { MotokoImport::Canister(canister_name) => { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index b35df0057c..43d8d81285 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -446,7 +446,7 @@ pub enum MotokoImport { /// The graph of Motoko imports (TODO: Motoko-specific code not here) pub struct ImportsTracker { - pub nodes: HashMap, + pub nodes: HashMap, pub graph: DiGraph, } @@ -483,7 +483,6 @@ impl CanisterPool { _ => None, }; let info = CanisterInfo::load(pool_helper.config, canister_name, canister_id)?; - // println!("CanisterInfo: {:#?}", info); let builder = pool_helper.builder_pool.get(&info); pool_helper .canisters_map From 7881f3d35e8ed3679f029699091e1f75bf0d8c5c Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 14 Apr 2024 20:52:08 +0300 Subject: [PATCH 038/354] misc --- 
src/dfx/src/lib/builders/motoko.rs | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 52041ac787..8a9198029f 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -59,9 +59,6 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor } let parent_node_index = *imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); imports.nodes.insert(parent.clone(), parent_node_index); - if let MotokoImport::Relative(path) = &parent { - println!("INSERTED: {}", path.display()); // FIXME - } let mut command = cache.get_binary_command("moc")?; let command = command.arg("--print-deps").arg(file); @@ -88,13 +85,6 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor } let parent_node_index = *imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); let child_node_index = *imports.nodes.entry(child.clone()).or_insert_with(|| imports.graph.add_node(child.clone())); - // if let MotokoImport::Relative(parent) = &parent { - // println!("INSERTED PARENT: {} ({:?})", parent.display(), parent_node); // FIXME - // } - // let child_node = imports.graph.add_node(child.clone()); - // if let MotokoImport::Relative(child) = &child { - // println!("INSERTED CHILD: {} ({:?})", child.display(), child_node); // FIXME - // } imports.graph.add_edge(parent_node_index, child_node_index, ()); } @@ -114,9 +104,7 @@ impl CanisterBuilder for MotokoBuilder { info: &CanisterInfo, ) -> DfxResult> { let motoko_info = info.as_info::()?; - println!("CanisterInfo3: {:#?}", info.get_main_file()); // FIXME get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut(), pool)?; - println!("CanisterInfo4: {:#?}", motoko_info.get_main_path()); // FIXME Ok(pool.imports.borrow().nodes .iter() @@ -195,8 +183,6 @@ impl CanisterBuilder for 
MotokoBuilder { if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let wasm_file_time = wasm_file_metadata.modified()?; let mut imports = pool.imports.borrow_mut(); - println!("START: {}", motoko_info.get_main_path().to_path_buf().display()); // FIXME - // let start = imports.graph.add_node(MotokoImport::Relative(motoko_info.get_main_path().to_path_buf())); // Start with oput canister. let start = if let Some(node_index) = imports.nodes.get(&MotokoImport::Relative(motoko_info.get_main_path().to_path_buf())) { *node_index } else { @@ -208,7 +194,6 @@ impl CanisterBuilder for MotokoBuilder { let mut import_iter = Bfs::new(&imports.graph, start); loop { if let Some(import) = import_iter.next(&imports.graph) { - println!("NodeIndex {:?}", import); let imported_file = match &imports.graph[import] { MotokoImport::Canister(canister_name) => { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { @@ -267,7 +252,6 @@ impl CanisterBuilder for MotokoBuilder { }; if let Some(imported_file) = imported_file { let imported_file_metadata = metadata(imported_file.as_ref())?; - println!("IMPORTED {}", imported_file.display()); // FIXME: Remove. let imported_file_time = imported_file_metadata.modified()?; if imported_file_time > wasm_file_time { break; From 65ca9163e8c202c9c96d725a8e2f4b7d9702f136 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 16:36:24 +0300 Subject: [PATCH 039/354] rewritten (buggy) version --- src/dfx/src/lib/models/canister.rs | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 43d8d81285..dd0dbd37e2 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -593,23 +593,28 @@ impl CanisterPool { // Traverse the graph of dependencies starting from `real_canisters_to_build` set. 
let mut current_canisters_to_build = HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); + // FIXME: The loop hangs on `dfx deploy`. loop { let mut current_canisters_to_build2 = HashMap::new(); for canister in &self.canisters { if !current_canisters_to_build.contains_key(&canister.canister_id()) { break; } - let canister_id = canister.canister_id(); + // let canister_id = canister.canister_id(); let canister_info = &canister.info; - let deps = canister.builder.get_dependencies(self, canister_info)?; - if let Some(node_ix) = id_set.get(&canister_id) { - for d in deps { - if let Some(dep_ix) = id_set.get(&d) { - graph.add_edge(*node_ix, *dep_ix, ()); - current_canisters_to_build2.insert(*graph.node_weight(*dep_ix).unwrap(), ()); - } - } + // FIXME: Is `unwrap()` in the next operator correct? + let deps: Vec = canister.builder.get_dependencies(self, canister_info)? + .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. + println!("deps: {:?}", deps); + // if let Some(node_ix) = id_set.get(&canister_id) { + for d in deps { + // if let Some(dep_ix) = id_set.get(&d) { + // if graph.contains_edge(*node_ix, *dep_ix) { + current_canisters_to_build2.insert(d/* *graph.node_weight(*dep_ix).unwrap() */, ()); + // } + // } } + // } } if current_canisters_to_build2.is_empty() { // passed to the end of the graph break; From d51af4230043c196a3fbf988d7b5c7d2489aeb31 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 16:37:10 +0300 Subject: [PATCH 040/354] comment --- src/dfx/src/lib/models/canister.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index dd0dbd37e2..6c6da1725e 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -593,10 +593,9 @@ impl CanisterPool { // Traverse the graph of dependencies starting from `real_canisters_to_build` set. 
let mut current_canisters_to_build = HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); - // FIXME: The loop hangs on `dfx deploy`. loop { let mut current_canisters_to_build2 = HashMap::new(); - for canister in &self.canisters { + for canister in &self.canisters { // a little inefficient if !current_canisters_to_build.contains_key(&canister.canister_id()) { break; } @@ -605,7 +604,6 @@ impl CanisterPool { // FIXME: Is `unwrap()` in the next operator correct? let deps: Vec = canister.builder.get_dependencies(self, canister_info)? .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. - println!("deps: {:?}", deps); // if let Some(node_ix) = id_set.get(&canister_id) { for d in deps { // if let Some(dep_ix) = id_set.get(&d) { From 5fd83c05c53e740e64e823a44219bc228e6ae9d4 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 16:46:20 +0300 Subject: [PATCH 041/354] recovered code after I damaged it --- src/dfx/src/lib/models/canister.rs | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 6c6da1725e..bb96f4da40 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -599,20 +599,19 @@ impl CanisterPool { if !current_canisters_to_build.contains_key(&canister.canister_id()) { break; } - // let canister_id = canister.canister_id(); + let canister_id = canister.canister_id(); let canister_info = &canister.info; // FIXME: Is `unwrap()` in the next operator correct? let deps: Vec = canister.builder.get_dependencies(self, canister_info)? .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. 
- // if let Some(node_ix) = id_set.get(&canister_id) { - for d in deps { - // if let Some(dep_ix) = id_set.get(&d) { - // if graph.contains_edge(*node_ix, *dep_ix) { - current_canisters_to_build2.insert(d/* *graph.node_weight(*dep_ix).unwrap() */, ()); - // } - // } + if let Some(node_ix) = id_set.get(&canister_id) { + for d in deps { + if let Some(dep_ix) = id_set.get(&d) { + graph.add_edge(*node_ix, *dep_ix, ()); + } + current_canisters_to_build2.insert(d, ()); + } } - // } } if current_canisters_to_build2.is_empty() { // passed to the end of the graph break; From ffdba37af7357d9c6538a068658c0d35aad78ab1 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 17:13:04 +0300 Subject: [PATCH 042/354] simplified --- src/dfx/src/lib/models/canister.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index bb96f4da40..ecf8850c93 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -575,6 +575,7 @@ impl CanisterPool { id_set.insert(canister_id, graph.add_node(canister_id)); } + // FIXME: Verify after graph creation (and that creation does not stuck in an infinite loop). // Verify the graph has no cycles. if let Err(err) = petgraph::algo::toposort(&graph, None) { let message = match graph.node_weight(err.node_id()) { @@ -590,7 +591,7 @@ impl CanisterPool { )))) } - // Traverse the graph of dependencies starting from `real_canisters_to_build` set. + // Traverse, creating the graph of dependencies starting from `real_canisters_to_build` set. let mut current_canisters_to_build = HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); loop { @@ -606,9 +607,8 @@ impl CanisterPool { .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. 
if let Some(node_ix) = id_set.get(&canister_id) { for d in deps { - if let Some(dep_ix) = id_set.get(&d) { - graph.add_edge(*node_ix, *dep_ix, ()); - } + let dep_ix = id_set.get(&d).unwrap(); + graph.add_edge(*node_ix, *dep_ix, ()); current_canisters_to_build2.insert(d, ()); } } From 4f597cb2d6a85d8ea07d5a5943cafe9a90375eb6 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 18:13:53 +0300 Subject: [PATCH 043/354] bug fix --- src/dfx/src/lib/models/canister.rs | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index ecf8850c93..bc13ac84aa 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -569,11 +569,11 @@ impl CanisterPool { self.canisters.iter().map(|c| c.clone()).collect::>() }; - // Add all the canisters as nodes. - for canister in &self.canisters { - let canister_id = canister.info.get_canister_id()?; - id_set.insert(canister_id, graph.add_node(canister_id)); - } + // [DO NOT] Add all the canisters as nodes. + // for canister in &self.canisters { + // let canister_id = canister.info.get_canister_id()?; + // id_set.insert(canister_id, graph.add_node(canister_id)); + // } // FIXME: Verify after graph creation (and that creation does not stuck in an infinite loop). // Verify the graph has no cycles. @@ -605,12 +605,11 @@ impl CanisterPool { // FIXME: Is `unwrap()` in the next operator correct? let deps: Vec = canister.builder.get_dependencies(self, canister_info)? .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. 
- if let Some(node_ix) = id_set.get(&canister_id) { - for d in deps { - let dep_ix = id_set.get(&d).unwrap(); - graph.add_edge(*node_ix, *dep_ix, ()); - current_canisters_to_build2.insert(d, ()); - } + let node_ix = *id_set.entry(canister_id).or_insert_with(|| graph.add_node(canister_id)); + for d in deps { + let dep_ix = *id_set.entry(d).or_insert_with(|| graph.add_node(d)); + graph.add_edge(node_ix, dep_ix, ()); + current_canisters_to_build2.insert(d, ()); } } if current_canisters_to_build2.is_empty() { // passed to the end of the graph @@ -762,14 +761,16 @@ impl CanisterPool { .collect(); // let canisters_to_build = Bfs::new(graph, start); - let canisters_to_build = self.canisters_to_build(build_config); + // let canisters_to_build = self.canisters_to_build(build_config); // FIXME + // TODO: The next line is slow and confusing code. + let canisters_to_build: Vec<&Arc> = self.canisters.iter().filter(|c| order.contains(&c.canister_id())).collect(); let mut result = Vec::new(); for canister_id in &order { if let Some(canister) = self.get_canister(canister_id) { if canisters_to_build .iter() .map(|c| c.get_name()) - .contains(&canister.get_name()) + .contains(&canister.get_name()) // TODO: slow { trace!(log, "Building canister '{}'.", canister.get_name()); } else { @@ -893,6 +894,7 @@ impl CanisterPool { Ok(()) } + // FIXME: Remove this function pub fn canisters_to_build(&self, build_config: &BuildConfig) -> Vec<&Arc> { if let Some(canister_names) = &build_config.canisters_to_build { self.canisters From dcbba40a171e01535c3929b9d71468a86181005e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 18:23:48 +0300 Subject: [PATCH 044/354] comment --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index bc13ac84aa..8991979d0e 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -894,7 +894,7 @@ 
impl CanisterPool { Ok(()) } - // FIXME: Remove this function + // FIXME: Is this function miused? pub fn canisters_to_build(&self, build_config: &BuildConfig) -> Vec<&Arc> { if let Some(canister_names) = &build_config.canisters_to_build { self.canisters From b8b89ea21e694434397e4db758316e87442916b4 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 18:29:56 +0300 Subject: [PATCH 045/354] bug fix --- src/dfx/src/lib/models/canister.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 8991979d0e..4104ba345a 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -594,6 +594,9 @@ impl CanisterPool { // Traverse, creating the graph of dependencies starting from `real_canisters_to_build` set. let mut current_canisters_to_build = HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); + for canister_id in current_canisters_to_build.keys() { + graph.add_node(*canister_id); + } loop { let mut current_canisters_to_build2 = HashMap::new(); for canister in &self.canisters { // a little inefficient From 52e489c3d6d5b87344e4a6262b30a5d8804a50c9 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 18:47:21 +0300 Subject: [PATCH 046/354] bug fix --- src/dfx/src/lib/models/canister.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 4104ba345a..49cb489979 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -595,7 +595,7 @@ impl CanisterPool { let mut current_canisters_to_build = HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); for canister_id in current_canisters_to_build.keys() { - graph.add_node(*canister_id); + id_set.insert(*canister_id, graph.add_node(*canister_id)); } loop { let mut current_canisters_to_build2 = HashMap::new(); @@ 
-621,6 +621,7 @@ impl CanisterPool { current_canisters_to_build = current_canisters_to_build2; } + println!("id_set: {:?}", id_set.keys()); Ok(graph) } From 62022b72fbb0f91f42ec5ed97d935d58b21587c2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 18:47:50 +0300 Subject: [PATCH 047/354] missing FIXME --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 49cb489979..d73ba318cb 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -621,7 +621,7 @@ impl CanisterPool { current_canisters_to_build = current_canisters_to_build2; } - println!("id_set: {:?}", id_set.keys()); + println!("id_set: {:?}", id_set.keys()); // FIXME: Remove. Ok(graph) } From bf00c73dde913e10c368e141c56e8a9b846fa32e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 19:16:11 +0300 Subject: [PATCH 048/354] misc --- src/dfx/src/lib/models/canister.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index d73ba318cb..cfc58b0471 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -560,6 +560,7 @@ impl CanisterPool { let mut id_set: BTreeMap> = BTreeMap::new(); // TODO: Can be done faster by not using `collect` and/or `clone`? + // `real_canisters_to_build` are canisters that user explicitly requested to build. 
let real_canisters_to_build = if let Some(canisters_to_build) = canisters_to_build { let canisters_to_build_map: HashMap<&str, ()> = canisters_to_build.iter().map(|e| (e.as_str(), ())).collect(); self.canisters.iter() @@ -599,15 +600,20 @@ impl CanisterPool { } loop { let mut current_canisters_to_build2 = HashMap::new(); + // println!("self.canisters.len(): {}", self.canisters.len()); for canister in &self.canisters { // a little inefficient + // FIXME: Remove: + println!("current_canisters_to_build={:?} canister={}", + current_canisters_to_build.keys().map(|c| c.to_text()).collect::>(), canister.canister_id()); if !current_canisters_to_build.contains_key(&canister.canister_id()) { - break; + continue; } let canister_id = canister.canister_id(); let canister_info = &canister.info; // FIXME: Is `unwrap()` in the next operator correct? let deps: Vec = canister.builder.get_dependencies(self, canister_info)? .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. 
+ println!("deps.len(): {}", deps.len()); let node_ix = *id_set.entry(canister_id).or_insert_with(|| graph.add_node(canister_id)); for d in deps { let dep_ix = *id_set.entry(d).or_insert_with(|| graph.add_node(d)); From 3e74af601b165def3f9bb79280d35733f319f88a Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 15 Apr 2024 20:43:29 +0300 Subject: [PATCH 049/354] attempted bug fix --- src/dfx/src/lib/models/canister.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index cfc58b0471..f8b519dc84 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -596,7 +596,8 @@ impl CanisterPool { let mut current_canisters_to_build = HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); for canister_id in current_canisters_to_build.keys() { - id_set.insert(*canister_id, graph.add_node(*canister_id)); + id_set.entry(*canister_id).or_insert_with(|| graph.add_node(*canister_id)); + // id_set.insert(*canister_id, graph.add_node(*canister_id)); // TODO: Use this, instead. 
} loop { let mut current_canisters_to_build2 = HashMap::new(); From 67b2f63b613f60a4c709ec541bf8fb201907eb4a Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 09:28:37 +0300 Subject: [PATCH 050/354] trying to fix a bug --- src/dfx/src/lib/builders/motoko.rs | 46 +++++++++++++++++++++++------- src/dfx/src/lib/models/canister.rs | 32 ++++++++++----------- 2 files changed, 51 insertions(+), 27 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 8a9198029f..063a6414f3 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -106,17 +106,41 @@ impl CanisterBuilder for MotokoBuilder { let motoko_info = info.as_info::()?; get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut(), pool)?; - Ok(pool.imports.borrow().nodes - .iter() - .filter_map(|import| { - if let MotokoImport::Canister(name) = import.0 { - pool.get_first_canister_with_name(name.as_str()) - } else { - None - } - }) - .map(|canister| canister.canister_id()) - .collect()) + // let iter = Bfs::new(pool.imports.borrow().graph); + match petgraph::algo::toposort(&pool.imports.borrow().graph, None) { + Ok(order) => { + let graph = &pool.imports.borrow().graph; + Ok(order.into_iter().filter_map(|id| match graph.node_weight(id) { + Some(MotokoImport::Canister(name)) => pool.get_first_canister_with_name(name.as_str()), // TODO: a little inefficient + _ => None, + }).map(|canister| canister.canister_id()).collect()) + } + Err(err) => { + let graph = &pool.imports.borrow().graph; + let message = match graph.node_weight(err.node_id()) { + Some(canister_id) => match canister_id { + MotokoImport::Canister(name) => name.clone(), // TODO: Can deal without `clone()`? 
+ _ => "".to_string(), + }, + None => "".to_string(), + }; + return Err(DfxError::new(BuildError::DependencyError(format!( + "Found circular dependency: {}", + message + )))); + } + } + // Ok(pool.imports.borrow().nodes + // .iter() + // .filter_map(|import| { + // if let MotokoImport::Canister(name) = import.0 { + // pool.get_first_canister_with_name(name.as_str()) + // } else { + // None + // } + // }) + // .map(|canister| canister.canister_id()) + // .collect()) } #[context("Failed to build Motoko canister '{}'.", canister_info.get_name())] diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index f8b519dc84..01a69fb779 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -578,19 +578,19 @@ impl CanisterPool { // FIXME: Verify after graph creation (and that creation does not stuck in an infinite loop). // Verify the graph has no cycles. - if let Err(err) = petgraph::algo::toposort(&graph, None) { - let message = match graph.node_weight(err.node_id()) { - Some(canister_id) => match self.get_canister_info(canister_id) { - Some(info) => info.get_name().to_string(), - None => format!("<{}>", canister_id.to_text()), - }, - None => "".to_string(), - }; - return Err(DfxError::new(BuildError::DependencyError(format!( - "Found circular dependency: {}", - message - )))) - } + // if let Err(err) = petgraph::algo::toposort(&graph, None) { + // let message = match graph.node_weight(err.node_id()) { + // Some(canister_id) => match self.get_canister_info(canister_id) { + // Some(info) => info.get_name().to_string(), + // None => format!("<{}>", canister_id.to_text()), + // }, + // None => "".to_string(), + // }; + // return Err(DfxError::new(BuildError::DependencyError(format!( + // "Found circular dependency: {}", + // message + // )))) + // } // Traverse, creating the graph of dependencies starting from `real_canisters_to_build` set. 
let mut current_canisters_to_build = @@ -604,8 +604,6 @@ impl CanisterPool { // println!("self.canisters.len(): {}", self.canisters.len()); for canister in &self.canisters { // a little inefficient // FIXME: Remove: - println!("current_canisters_to_build={:?} canister={}", - current_canisters_to_build.keys().map(|c| c.to_text()).collect::>(), canister.canister_id()); if !current_canisters_to_build.contains_key(&canister.canister_id()) { continue; } @@ -614,14 +612,16 @@ impl CanisterPool { // FIXME: Is `unwrap()` in the next operator correct? let deps: Vec = canister.builder.get_dependencies(self, canister_info)? .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. - println!("deps.len(): {}", deps.len()); + // println!("PARENT: {}, DEPS: {:?}", canister.get_info().get_canister_id()?.to_text(), deps.iter().map(|c| c.to_text()).collect::>()); // FIXME let node_ix = *id_set.entry(canister_id).or_insert_with(|| graph.add_node(canister_id)); for d in deps { let dep_ix = *id_set.entry(d).or_insert_with(|| graph.add_node(d)); graph.add_edge(node_ix, dep_ix, ()); current_canisters_to_build2.insert(d, ()); + println!("canister_id={} -> d={}", canister_id, d); } } + println!("NEXT CYCLE"); // FIXME: Remove. 
if current_canisters_to_build2.is_empty() { // passed to the end of the graph break; } From eb491d9b13a85096ada08bf545a703e6ed584c4d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 09:29:37 +0300 Subject: [PATCH 051/354] refactor --- src/dfx/src/lib/builders/motoko.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 063a6414f3..f88fa861f4 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -106,17 +106,15 @@ impl CanisterBuilder for MotokoBuilder { let motoko_info = info.as_info::()?; get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut(), pool)?; - // let iter = Bfs::new(pool.imports.borrow().graph); + let graph = &pool.imports.borrow().graph; match petgraph::algo::toposort(&pool.imports.borrow().graph, None) { Ok(order) => { - let graph = &pool.imports.borrow().graph; Ok(order.into_iter().filter_map(|id| match graph.node_weight(id) { Some(MotokoImport::Canister(name)) => pool.get_first_canister_with_name(name.as_str()), // TODO: a little inefficient _ => None, }).map(|canister| canister.canister_id()).collect()) } Err(err) => { - let graph = &pool.imports.borrow().graph; let message = match graph.node_weight(err.node_id()) { Some(canister_id) => match canister_id { MotokoImport::Canister(name) => name.clone(), // TODO: Can deal without `clone()`? From 6e46a8f581e9484f70e228dd8f703f1b2ded6911 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 10:02:19 +0300 Subject: [PATCH 052/354] it works! 
--- src/dfx/src/lib/models/canister.rs | 69 +++++++++++++++++------------- 1 file changed, 40 insertions(+), 29 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 01a69fb779..1024c9783e 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -556,7 +556,7 @@ impl CanisterPool { /// Build only dependencies relevant for `canisters_to_build`. #[context("Failed to build dependencies graph for canister pool.")] fn build_dependencies_graph(&self, canisters_to_build: Option>) -> DfxResult> { - let mut graph: DiGraph = DiGraph::new(); + let mut graph: DiGraph = DiGraph::new(); // TODO: hack: we below transform DiGraph to DiGraph let mut id_set: BTreeMap> = BTreeMap::new(); // TODO: Can be done faster by not using `collect` and/or `clone`? @@ -593,43 +593,54 @@ impl CanisterPool { // } // Traverse, creating the graph of dependencies starting from `real_canisters_to_build` set. - let mut current_canisters_to_build = - HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); - for canister_id in current_canisters_to_build.keys() { - id_set.entry(*canister_id).or_insert_with(|| graph.add_node(*canister_id)); - // id_set.insert(*canister_id, graph.add_node(*canister_id)); // TODO: Use this, instead. - } - loop { - let mut current_canisters_to_build2 = HashMap::new(); - // println!("self.canisters.len(): {}", self.canisters.len()); + // let mut current_canisters_to_build = + // HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); + // for canister_id in current_canisters_to_build.keys() { + // id_set.entry(*canister_id).or_insert_with(|| graph.add_node(*canister_id)); + // // id_set.insert(*canister_id, graph.add_node(*canister_id)); // TODO: Use this, instead. 
+ // } + // loop { + // let mut current_canisters_to_build2 = HashMap::new(); + // // println!("self.canisters.len(): {}", self.canisters.len()); for canister in &self.canisters { // a little inefficient - // FIXME: Remove: - if !current_canisters_to_build.contains_key(&canister.canister_id()) { - continue; - } + // if !current_canisters_to_build.contains_key(&canister.canister_id()) { + // continue; + // } let canister_id = canister.canister_id(); let canister_info = &canister.info; // FIXME: Is `unwrap()` in the next operator correct? let deps: Vec = canister.builder.get_dependencies(self, canister_info)? .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. // println!("PARENT: {}, DEPS: {:?}", canister.get_info().get_canister_id()?.to_text(), deps.iter().map(|c| c.to_text()).collect::>()); // FIXME - let node_ix = *id_set.entry(canister_id).or_insert_with(|| graph.add_node(canister_id)); - for d in deps { - let dep_ix = *id_set.entry(d).or_insert_with(|| graph.add_node(d)); - graph.add_edge(node_ix, dep_ix, ()); - current_canisters_to_build2.insert(d, ()); - println!("canister_id={} -> d={}", canister_id, d); - } + // let node_ix = *id_set.entry(canister_id).or_insert_with(|| graph.add_node(canister_id)); + // for d in deps { + // let dep_ix = *id_set.entry(d).or_insert_with(|| graph.add_node(d)); + // graph.add_edge(node_ix, dep_ix, ()); + // current_canisters_to_build2.insert(d, ()); + // println!("canister_id={} -> d={}", canister_id, d); + // } } - println!("NEXT CYCLE"); // FIXME: Remove. - if current_canisters_to_build2.is_empty() { // passed to the end of the graph - break; - } - current_canisters_to_build = current_canisters_to_build2; - } + // println!("NEXT CYCLE"); // FIXME: Remove. 
+ // if current_canisters_to_build2.is_empty() { // passed to the end of the graph + // break; + // } + // current_canisters_to_build = current_canisters_to_build2; + // } - println!("id_set: {:?}", id_set.keys()); // FIXME: Remove. - Ok(graph) + // Ok(graph) + Ok(self.imports.borrow().graph.filter_map( + |node_index, node_weight| { + // B::from(node_weight) + match node_weight { + // TODO: `get_first_canister_with_name` is a hack + MotokoImport::Canister(name) => Some(self.get_first_canister_with_name(&name).unwrap().canister_id()), + _ => None, + } + }, + |edge_index, edge_weight| { + Some(()) + } + )) } #[context("Failed step_prebuild_all.")] From 79faa772dd3b8a7683ed9670a1447ecd0b2893ba Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 10:05:25 +0300 Subject: [PATCH 053/354] removed unnecessary code --- src/dfx/src/lib/models/canister.rs | 89 +++++------------------------- 1 file changed, 15 insertions(+), 74 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 1024c9783e..a85770bc34 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -24,7 +24,7 @@ use petgraph::graph::{DiGraph, NodeIndex}; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; -use std::collections::{BTreeMap, HashMap, HashSet}; +use std::collections::{HashMap, HashSet}; use std::convert::TryFrom; use std::ffi::OsStr; use std::io::Read; @@ -554,82 +554,23 @@ impl CanisterPool { } /// Build only dependencies relevant for `canisters_to_build`. + /// + /// FIXME: unused argument. #[context("Failed to build dependencies graph for canister pool.")] - fn build_dependencies_graph(&self, canisters_to_build: Option>) -> DfxResult> { - let mut graph: DiGraph = DiGraph::new(); // TODO: hack: we below transform DiGraph to DiGraph - let mut id_set: BTreeMap> = BTreeMap::new(); - - // TODO: Can be done faster by not using `collect` and/or `clone`? 
- // `real_canisters_to_build` are canisters that user explicitly requested to build. - let real_canisters_to_build = if let Some(canisters_to_build) = canisters_to_build { - let canisters_to_build_map: HashMap<&str, ()> = canisters_to_build.iter().map(|e| (e.as_str(), ())).collect(); - self.canisters.iter() - .filter(|c| canisters_to_build_map.contains_key(c.get_name())) - .map(|c| c.clone()).collect::>() - } else { - self.canisters.iter().map(|c| c.clone()).collect::>() - }; - - // [DO NOT] Add all the canisters as nodes. - // for canister in &self.canisters { - // let canister_id = canister.info.get_canister_id()?; - // id_set.insert(canister_id, graph.add_node(canister_id)); - // } - - // FIXME: Verify after graph creation (and that creation does not stuck in an infinite loop). - // Verify the graph has no cycles. - // if let Err(err) = petgraph::algo::toposort(&graph, None) { - // let message = match graph.node_weight(err.node_id()) { - // Some(canister_id) => match self.get_canister_info(canister_id) { - // Some(info) => info.get_name().to_string(), - // None => format!("<{}>", canister_id.to_text()), - // }, - // None => "".to_string(), - // }; - // return Err(DfxError::new(BuildError::DependencyError(format!( - // "Found circular dependency: {}", - // message - // )))) - // } - - // Traverse, creating the graph of dependencies starting from `real_canisters_to_build` set. - // let mut current_canisters_to_build = - // HashMap::from_iter(real_canisters_to_build.iter().map(|c| (c.canister_id(), ()))); - // for canister_id in current_canisters_to_build.keys() { - // id_set.entry(*canister_id).or_insert_with(|| graph.add_node(*canister_id)); - // // id_set.insert(*canister_id, graph.add_node(*canister_id)); // TODO: Use this, instead. 
- // } - // loop { - // let mut current_canisters_to_build2 = HashMap::new(); - // // println!("self.canisters.len(): {}", self.canisters.len()); - for canister in &self.canisters { // a little inefficient - // if !current_canisters_to_build.contains_key(&canister.canister_id()) { - // continue; - // } - let canister_id = canister.canister_id(); - let canister_info = &canister.info; - // FIXME: Is `unwrap()` in the next operator correct? - let deps: Vec = canister.builder.get_dependencies(self, canister_info)? - .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. - // println!("PARENT: {}, DEPS: {:?}", canister.get_info().get_canister_id()?.to_text(), deps.iter().map(|c| c.to_text()).collect::>()); // FIXME - // let node_ix = *id_set.entry(canister_id).or_insert_with(|| graph.add_node(canister_id)); - // for d in deps { - // let dep_ix = *id_set.entry(d).or_insert_with(|| graph.add_node(d)); - // graph.add_edge(node_ix, dep_ix, ()); - // current_canisters_to_build2.insert(d, ()); - // println!("canister_id={} -> d={}", canister_id, d); - // } - } - // println!("NEXT CYCLE"); // FIXME: Remove. - // if current_canisters_to_build2.is_empty() { // passed to the end of the graph - // break; + fn build_dependencies_graph(&self, _canisters_to_build: Option>) -> DfxResult> { + for canister in &self.canisters { // a little inefficient + // if !current_canisters_to_build.contains_key(&canister.canister_id()) { + // continue; // } - // current_canisters_to_build = current_canisters_to_build2; - // } + let canister_info = &canister.info; + // FIXME: Is `unwrap()` in the next operator correct? + // TODO: Ignored return value is a hack + let _deps: Vec = canister.builder.get_dependencies(self, canister_info)? + .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. 
+ } - // Ok(graph) Ok(self.imports.borrow().graph.filter_map( - |node_index, node_weight| { + |_node_index, node_weight| { // B::from(node_weight) match node_weight { // TODO: `get_first_canister_with_name` is a hack @@ -637,7 +578,7 @@ impl CanisterPool { _ => None, } }, - |edge_index, edge_weight| { + |_edge_index, _edge_weight| { Some(()) } )) From 8045b53f00c2f2b9549bc3ff8d66fb65ffd4ef48 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 10:13:56 +0300 Subject: [PATCH 054/354] comments --- src/dfx/src/lib/models/canister.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index a85770bc34..47b7929445 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -559,19 +559,15 @@ impl CanisterPool { #[context("Failed to build dependencies graph for canister pool.")] fn build_dependencies_graph(&self, _canisters_to_build: Option>) -> DfxResult> { for canister in &self.canisters { // a little inefficient - // if !current_canisters_to_build.contains_key(&canister.canister_id()) { - // continue; - // } let canister_info = &canister.info; // FIXME: Is `unwrap()` in the next operator correct? - // TODO: Ignored return value is a hack + // TODO: Ignored return value is a hack. let _deps: Vec = canister.builder.get_dependencies(self, canister_info)? .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. 
} Ok(self.imports.borrow().graph.filter_map( |_node_index, node_weight| { - // B::from(node_weight) match node_weight { // TODO: `get_first_canister_with_name` is a hack MotokoImport::Canister(name) => Some(self.get_first_canister_with_name(&name).unwrap().canister_id()), From 0a89fe40edc6578d11072d734a451376e34b8f42 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 10:53:46 +0300 Subject: [PATCH 055/354] attempted fix (creates another bug) --- src/dfx/src/lib/models/canister.rs | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 47b7929445..90c4cba707 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -554,16 +554,22 @@ impl CanisterPool { } /// Build only dependencies relevant for `canisters_to_build`. - /// - /// FIXME: unused argument. #[context("Failed to build dependencies graph for canister pool.")] - fn build_dependencies_graph(&self, _canisters_to_build: Option>) -> DfxResult> { + fn build_dependencies_graph(&self, canisters_to_build: Option>) -> DfxResult> { + // println!("canisters_to_build: {:?}", canisters_to_build); for canister in &self.canisters { // a little inefficient - let canister_info = &canister.info; - // FIXME: Is `unwrap()` in the next operator correct? - // TODO: Ignored return value is a hack. - let _deps: Vec = canister.builder.get_dependencies(self, canister_info)? - .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. + let contains = if let Some(canisters_to_build) = &canisters_to_build { + canisters_to_build.iter().contains(&canister.get_info().get_name().to_string()) // TODO: a little slow + } else { + true // because user specified to build all canisters + }; + if contains { + let canister_info = &canister.info; + // FIXME: Is `unwrap()` in the next operator correct? 
+ // TODO: Ignored return value is a hack. + let _deps: Vec = canister.builder.get_dependencies(self, canister_info)? + .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. + } } Ok(self.imports.borrow().graph.filter_map( From 2cbad6d43d5786dbc871ba23455a8cb87d753050 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 11:25:47 +0300 Subject: [PATCH 056/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 24 +++++++++++++++--------- src/dfx/src/lib/models/canister.rs | 5 +++++ 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index f88fa861f4..9eb3429ea4 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -44,16 +44,23 @@ impl MotokoBuilder { } // TODO: Rename this function. -#[context("Failed to find imports for canister at '{}'.", info.get_main_path().display())] -fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { +// TODO: Is `unwrap()` in the next line correct? 
+#[context("Failed to find imports for canister at '{}'.", info.as_info::().unwrap().get_main_path().display())] +fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { + let motoko_info = info.as_info::()?; #[context("Failed recursive dependency detection at {}.", file.display())] fn get_imports_recursive ( cache: &dyn Cache, file: &Path, imports: &mut ImportsTracker, pool: &CanisterPool, + top: Option<&CanisterInfo>, // hackish ) -> DfxResult { - let parent = MotokoImport::Relative(file.to_path_buf()); + let parent = if let Some(top) = top { + MotokoImport::Canister(top.get_name().to_string()) // a little inefficient + } else { + MotokoImport::Relative(file.to_path_buf()) + }; if imports.nodes.contains_key(&parent) { return Ok(()); } @@ -71,13 +78,13 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor let child = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; match &child { MotokoImport::Relative(path) => { - get_imports_recursive(cache, path.as_path(), imports, pool)?; + get_imports_recursive(cache, path.as_path(), imports, pool, None)?; } MotokoImport::Canister(canister_name) => { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { - get_imports_recursive(cache, Path::new(main_file), imports, pool)?; + get_imports_recursive(cache, Path::new(main_file), imports, pool, None)?; } } } @@ -91,7 +98,7 @@ fn get_imports(cache: &dyn Cache, info: &MotokoCanisterInfo, imports: &mut Impor Ok(()) } - get_imports_recursive(cache, info.get_main_path(), imports, pool)?; + get_imports_recursive(cache, motoko_info.get_main_path(), imports, pool, Some(info))?; Ok(()) } @@ -103,8 +110,7 @@ impl CanisterBuilder for MotokoBuilder { pool: &CanisterPool, info: &CanisterInfo, ) -> DfxResult> { - let motoko_info = 
info.as_info::()?; - get_imports(self.cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut(), pool)?; + get_imports(self.cache.as_ref(), info, &mut *pool.imports.borrow_mut(), pool)?; let graph = &pool.imports.borrow().graph; match petgraph::algo::toposort(&pool.imports.borrow().graph, None) { @@ -177,7 +183,7 @@ impl CanisterBuilder for MotokoBuilder { std::fs::create_dir_all(idl_dir_path) .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; - get_imports(cache.as_ref(), &motoko_info, &mut *pool.imports.borrow_mut(), pool)?; + get_imports(cache.as_ref(), canister_info, &mut *pool.imports.borrow_mut(), pool)?; // If the management canister is being imported, emit the candid file. if pool.imports.borrow().nodes.contains_key(&MotokoImport::Ic("aaaaa-aa".to_string())) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 90c4cba707..3f48824444 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -572,9 +572,12 @@ impl CanisterPool { } } + println!("GRAPH2: {:?}", self.imports.borrow().graph); + // FIXME: Error on indirect dependencies. Ok(self.imports.borrow().graph.filter_map( |_node_index, node_weight| { match node_weight { + // FIXME: The "tops" of the digraph are `Relative()`, not `Canister()` // TODO: `get_first_canister_with_name` is a hack MotokoImport::Canister(name) => Some(self.get_first_canister_with_name(&name).unwrap().canister_id()), _ => None, @@ -709,6 +712,7 @@ impl CanisterPool { trace!(log, "Building dependencies graph."); let graph = self.build_dependencies_graph(build_config.canisters_to_build.clone())?; // TODO: Can `clone` be eliminated? 
+ println!("GRAPH: {:?}", graph); let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { Some(canister_id) => match self.get_canister_info(canister_id) { @@ -724,6 +728,7 @@ impl CanisterPool { .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. .map(|idx| *graph.node_weight(*idx).unwrap()) .collect(); + println!("ORDER: {:?}", order.iter().map(|c| c.to_text()).collect::>()); // let canisters_to_build = Bfs::new(graph, start); // let canisters_to_build = self.canisters_to_build(build_config); // FIXME From 95b22d999143dfb3dfbc35fcfa3c06db0abf242c Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 11:30:59 +0300 Subject: [PATCH 057/354] removed commented out code --- src/dfx/src/lib/builders/motoko.rs | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 9eb3429ea4..e7ae95ab15 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -134,17 +134,6 @@ impl CanisterBuilder for MotokoBuilder { )))); } } - // Ok(pool.imports.borrow().nodes - // .iter() - // .filter_map(|import| { - // if let MotokoImport::Canister(name) = import.0 { - // pool.get_first_canister_with_name(name.as_str()) - // } else { - // None - // } - // }) - // .map(|canister| canister.canister_id()) - // .collect()) } #[context("Failed to build Motoko canister '{}'.", canister_info.get_name())] From e63b7d35954303db6dc74b3e503c6dde72364f30 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 12:27:40 +0300 Subject: [PATCH 058/354] half written --- src/dfx/src/lib/models/canister.rs | 65 ++++++++++++++++++++++++------ 1 file changed, 52 insertions(+), 13 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 3f48824444..8f18817b5c 100644 --- a/src/dfx/src/lib/models/canister.rs +++ 
b/src/dfx/src/lib/models/canister.rs @@ -7,6 +7,7 @@ use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::metadata::dfx::DfxMetadata; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE, DFX}; +use crate::lib::operations::canister; use crate::lib::wasm::file::{compress_bytes, read_wasm_module}; use crate::util::assets; use anyhow::{anyhow, bail, Context}; @@ -21,6 +22,7 @@ use ic_wasm::metadata::{add_metadata, remove_metadata, Kind}; use ic_wasm::optimize::OptLevel; use itertools::Itertools; use petgraph::graph::{DiGraph, NodeIndex}; +use petgraph::visit::Dfs; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; @@ -572,21 +574,58 @@ impl CanisterPool { } } - println!("GRAPH2: {:?}", self.imports.borrow().graph); - // FIXME: Error on indirect dependencies. - Ok(self.imports.borrow().graph.filter_map( - |_node_index, node_weight| { - match node_weight { - // FIXME: The "tops" of the digraph are `Relative()`, not `Canister()` - // TODO: `get_first_canister_with_name` is a hack - MotokoImport::Canister(name) => Some(self.get_first_canister_with_name(&name).unwrap().canister_id()), - _ => None, + let real_canisters_to_build = match canisters_to_build { + Some(canisters_to_build) => canisters_to_build, + None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), + }; + // Transform the graph of file dependencies to graph of canister dependencies. + // For this do DFS for each of `real_canisters_to_build`. 
+ let source_graph = &self.imports.borrow().graph; + let mut dest_graph: DiGraph = DiGraph::new(); + let mut dest_id_set = HashMap::new(); + let mut name_to_dest = HashMap::new(); + for start_name in real_canisters_to_build.iter() { + let dest_start = self.get_first_canister_with_name(&start_name).unwrap().canister_id(); + let dest_start = *dest_id_set.entry(dest_start.clone()).or_insert_with(|| dest_graph.add_node(dest_start.clone())); // TODO: always inserts + name_to_dest.insert(start_name, dest_start); + let mut iter = Dfs::new(&source_graph, dest_start); + iter.next(&source_graph); + while let Some(cur_source_id) = iter.next(&source_graph) { + let cur_source_node = source_graph.node_weight(cur_source_id).unwrap(); + if let MotokoImport::Canister(name) = cur_source_node { + let parent_in_source_id = *iter.stack.iter().rev().find( + |&entry| + if let Some(MotokoImport::Canister(_parent_name)) = source_graph.node_weight(*entry) { + true + } else { + false + } + ).unwrap(); + // Both parent and current ancestor are `Canister` dependencies. + let parent_in_dest_id = + name_to_dest.entry(parent_in_source_id).or_insert_with(|| dest_graph.add_node(cur_canister_id)); + dest_graph.add_edge(parent_in_dest_id, b, ()) + // let parent_in_source = source_graph.node_weight(*parent_in_source).unwrap(); } - }, - |_edge_index, _edge_weight| { - Some(()) + // let cur_node_id = id_set.entry(cur_source_id).or_insert_with(|| id_set.insert(cur_source_id)); } - )) + } + + Ok(dest_graph) + // FIXME: Wrong behavior on indirect dependencies. 
+ // Ok(self.imports.borrow().graph.filter_map( + // |_node_index, node_weight| { + // match node_weight { + // // FIXME: The "tops" of the digraph are `Relative()`, not `Canister()` + // // TODO: `get_first_canister_with_name` is a hack + // MotokoImport::Canister(name) => Some(self.get_first_canister_with_name(&name).unwrap().canister_id()), + // _ => None, + // } + // }, + // |_edge_index, _edge_weight| { + // Some(()) + // } + // )) } #[context("Failed step_prebuild_all.")] From b0da56f7aa9e75f98ec5a6c3c3a9c04cee24d579 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 12:40:20 +0300 Subject: [PATCH 059/354] comment --- src/dfx/src/lib/models/canister.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 8f18817b5c..dabcd878d8 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -580,6 +580,7 @@ impl CanisterPool { }; // Transform the graph of file dependencies to graph of canister dependencies. // For this do DFS for each of `real_canisters_to_build`. + // TODO: Somebody, adopt this code to `pethgraph`. 
let source_graph = &self.imports.borrow().graph; let mut dest_graph: DiGraph = DiGraph::new(); let mut dest_id_set = HashMap::new(); From 368db99cf259109f761b12160ba005cc56091124 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 13:57:47 +0300 Subject: [PATCH 060/354] rewriting (does not compile) --- src/dfx/src/lib/graph/mod.rs | 1 + src/dfx/src/lib/graph/traverse_filtered.rs | 40 +++++++++++ src/dfx/src/lib/mod.rs | 1 + src/dfx/src/lib/models/canister.rs | 77 ++++++++++++++-------- 4 files changed, 90 insertions(+), 29 deletions(-) create mode 100644 src/dfx/src/lib/graph/mod.rs create mode 100644 src/dfx/src/lib/graph/traverse_filtered.rs diff --git a/src/dfx/src/lib/graph/mod.rs b/src/dfx/src/lib/graph/mod.rs new file mode 100644 index 0000000000..6b49d4e2b3 --- /dev/null +++ b/src/dfx/src/lib/graph/mod.rs @@ -0,0 +1 @@ +pub mod traverse_filtered; \ No newline at end of file diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs new file mode 100644 index 0000000000..d067fa7e0a --- /dev/null +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -0,0 +1,40 @@ +// TODO: Somebody, adopt this code to `pethgraph`. 
+use petgraph::{data::DataMap, visit::{Dfs, IntoNeighbors, VisitMap}}; + +pub struct DfsFiltered + // where P: FnMut(&N) -> bool +{ + base: Dfs, + // node_filter: P, +} + +impl DfsFiltered { + pub fn new(base: Dfs) -> Self { + Self { + base + } + } + + pub fn traverse(&mut self, graph: G, predicate: P, call: C) + where C: Fn(&N, &N) -> (), + G: IntoNeighbors + DataMap, + P: Fn(&N) -> bool, + N: Copy + PartialEq, + VM: VisitMap, + { + while let Some(item) = &self.base.next(graph) { + if predicate(item) { + let parent = self.base.stack.iter().rev().find( + |&entry| if let Some(elt) = graph.node_weight(*entry) { + predicate(elt) + } else { + false + } + ); + if let Some(parent) = parent { + call(parent, item); + } + } + } + } +} \ No newline at end of file diff --git a/src/dfx/src/lib/mod.rs b/src/dfx/src/lib/mod.rs index aaca7f7898..75bd3822ef 100644 --- a/src/dfx/src/lib/mod.rs +++ b/src/dfx/src/lib/mod.rs @@ -8,6 +8,7 @@ pub mod diagnosis; pub mod environment; pub mod error; pub mod error_code; +pub mod graph; pub mod ic_attributes; pub mod identity; pub mod info; diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index dabcd878d8..f19fe4c4fc 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -7,7 +7,7 @@ use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::metadata::dfx::DfxMetadata; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE, DFX}; -use crate::lib::operations::canister; +use crate::lib::graph::traverse_filtered::{self, DfsFiltered}; use crate::lib::wasm::file::{compress_bytes, read_wasm_module}; use crate::util::assets; use anyhow::{anyhow, bail, Context}; @@ -578,39 +578,58 @@ impl CanisterPool { Some(canisters_to_build) => canisters_to_build, None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), }; - // Transform the graph of file dependencies to graph of canister 
dependencies. - // For this do DFS for each of `real_canisters_to_build`. - // TODO: Somebody, adopt this code to `pethgraph`. + // // Transform the graph of file dependencies to graph of canister dependencies. + // // For this do DFS for each of `real_canisters_to_build`. let source_graph = &self.imports.borrow().graph; let mut dest_graph: DiGraph = DiGraph::new(); let mut dest_id_set = HashMap::new(); - let mut name_to_dest = HashMap::new(); - for start_name in real_canisters_to_build.iter() { - let dest_start = self.get_first_canister_with_name(&start_name).unwrap().canister_id(); - let dest_start = *dest_id_set.entry(dest_start.clone()).or_insert_with(|| dest_graph.add_node(dest_start.clone())); // TODO: always inserts - name_to_dest.insert(start_name, dest_start); - let mut iter = Dfs::new(&source_graph, dest_start); - iter.next(&source_graph); - while let Some(cur_source_id) = iter.next(&source_graph) { - let cur_source_node = source_graph.node_weight(cur_source_id).unwrap(); - if let MotokoImport::Canister(name) = cur_source_node { - let parent_in_source_id = *iter.stack.iter().rev().find( - |&entry| - if let Some(MotokoImport::Canister(_parent_name)) = source_graph.node_weight(*entry) { - true - } else { - false - } - ).unwrap(); - // Both parent and current ancestor are `Canister` dependencies. - let parent_in_dest_id = - name_to_dest.entry(parent_in_source_id).or_insert_with(|| dest_graph.add_node(cur_canister_id)); - dest_graph.add_edge(parent_in_dest_id, b, ()) - // let parent_in_source = source_graph.node_weight(*parent_in_source).unwrap(); + let dfs = Dfs::from_parts(real_canisters_to_build, HashMap::new()); // TODO: Use `FixedBitSet` instead of `HashMap`? 
+ let filtered_dfs = DfsFiltered::new(dfs); + filtered_dfs.traverse( + source_graph, + |s| { + if let Some(MotokoImport::Canister(_parent_name)) = source_graph.node_weight(*entry) { + true + } else { + false } - // let cur_node_id = id_set.entry(cur_source_id).or_insert_with(|| id_set.insert(cur_source_id)); + }, + |parent, child| { + let parent_id = *dest_id_set.entry(parent).or_insert_with(|| dest_graph.add_node(parent)); + let child_id = *dest_id_set.entry(child).or_insert_with(|| dest_graph.add_node(child)); + dest_graph.add_edge(parent_id, child_id, ()); } - } + ); + // let source_graph = &self.imports.borrow().graph; + // let mut dest_graph: DiGraph = DiGraph::new(); + // let mut dest_id_set = HashMap::new(); + // let mut name_to_dest = HashMap::new(); + // for start_name in real_canisters_to_build.iter() { + // let dest_start = self.get_first_canister_with_name(&start_name).unwrap().canister_id(); + // let dest_start = *dest_id_set.entry(dest_start.clone()).or_insert_with(|| dest_graph.add_node(dest_start.clone())); // TODO: always inserts + // name_to_dest.insert(start_name, dest_start); + // let mut iter = Dfs::new(&source_graph, dest_start); + // iter.next(&source_graph); + // while let Some(cur_source_id) = iter.next(&source_graph) { + // let cur_source_node = source_graph.node_weight(cur_source_id).unwrap(); + // if let MotokoImport::Canister(name) = cur_source_node { + // let parent_in_source_id = *iter.stack.iter().rev().find( + // |&entry| + // if let Some(MotokoImport::Canister(_parent_name)) = source_graph.node_weight(*entry) { + // true + // } else { + // false + // } + // ).unwrap(); + // // Both parent and current ancestor are `Canister` dependencies. 
+ // let parent_in_dest_id = + // name_to_dest.entry(parent_in_source_id).or_insert_with(|| dest_graph.add_node(cur_canister_id)); + // dest_graph.add_edge(parent_in_dest_id, b, ()) + // // let parent_in_source = source_graph.node_weight(*parent_in_source).unwrap(); + // } + // // let cur_node_id = id_set.entry(cur_source_id).or_insert_with(|| id_set.insert(cur_source_id)); + // } + // } Ok(dest_graph) // FIXME: Wrong behavior on indirect dependencies. From 8188d45b35db33e22d370803b9d37b74a4ab23ca Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 14:37:55 +0300 Subject: [PATCH 061/354] rewriting (does not compile) --- src/dfx/src/lib/graph/traverse_filtered.rs | 30 +++++++++------------- src/dfx/src/lib/models/canister.rs | 22 ++++++++++------ 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index d067fa7e0a..76d3ed82b0 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,37 +1,31 @@ // TODO: Somebody, adopt this code to `pethgraph`. 
use petgraph::{data::DataMap, visit::{Dfs, IntoNeighbors, VisitMap}}; -pub struct DfsFiltered +pub struct DfsFiltered // where P: FnMut(&N) -> bool { - base: Dfs, + base: Dfs, // node_filter: P, } -impl DfsFiltered { - pub fn new(base: Dfs) -> Self { +impl DfsFiltered { + pub fn new(base: Dfs) -> Self { Self { base } } - pub fn traverse(&mut self, graph: G, predicate: P, call: C) - where C: Fn(&N, &N) -> (), - G: IntoNeighbors + DataMap, - P: Fn(&N) -> bool, - N: Copy + PartialEq, - VM: VisitMap, + pub fn traverse(&mut self, graph: G, predicate: P, call: C) + where C: Fn(&NodeId, &NodeId) -> (), + G: IntoNeighbors + DataMap, + P: Fn(&NodeId) -> bool, + NodeId: Copy + PartialEq, + VM: VisitMap, { while let Some(item) = &self.base.next(graph) { if predicate(item) { - let parent = self.base.stack.iter().rev().find( - |&entry| if let Some(elt) = graph.node_weight(*entry) { - predicate(elt) - } else { - false - } - ); - if let Some(parent) = parent { + let parent = self.base.stack.iter().rev().find(predicate); + if let Some(parent) = &parent { call(parent, item); } } diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index f19fe4c4fc..150d1b3db4 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -578,26 +578,32 @@ impl CanisterPool { Some(canisters_to_build) => canisters_to_build, None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), }; + // let real_canisters_to_build = real_canisters_to_build.iter().collect(); // hack + let source_graph = &self.imports.borrow().graph; + let source_ids = &self.imports.borrow().nodes; + let start: Vec<_> = + real_canisters_to_build.iter().map(|name| &MotokoImport::Canister(name.clone())).collect(); // `clone` is inefficient. + let start = start.into_iter().map(|node| *source_ids.get(node).unwrap()).collect(); // // Transform the graph of file dependencies to graph of canister dependencies. 
// // For this do DFS for each of `real_canisters_to_build`. - let source_graph = &self.imports.borrow().graph; let mut dest_graph: DiGraph = DiGraph::new(); let mut dest_id_set = HashMap::new(); - let dfs = Dfs::from_parts(real_canisters_to_build, HashMap::new()); // TODO: Use `FixedBitSet` instead of `HashMap`? + let dfs = Dfs::from_parts(start, HashSet::new()); // TODO: Use `FixedBitSet` instead of `HashMap`? let filtered_dfs = DfsFiltered::new(dfs); filtered_dfs.traverse( source_graph, - |s| { - if let Some(MotokoImport::Canister(_parent_name)) = source_graph.node_weight(*entry) { + |&s| { + let source_id = source_graph.node_weight(s); + if let Some(MotokoImport::Canister(_parent_name)) = source_id { true } else { false } }, - |parent, child| { - let parent_id = *dest_id_set.entry(parent).or_insert_with(|| dest_graph.add_node(parent)); - let child_id = *dest_id_set.entry(child).or_insert_with(|| dest_graph.add_node(child)); - dest_graph.add_edge(parent_id, child_id, ()); + |parent_id, child_id| { + let parent_id = *dest_id_set.entry(*parent_id).or_insert_with(|| parent_id); + let child_id = *dest_id_set.entry(*child_id).or_insert_with(|| child_id); + dest_graph.add_edge(*parent_id, *child_id, ()); } ); // let source_graph = &self.imports.borrow().graph; From 6e6593928bd1ae76c9aaeb535809350ddf64d1d6 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 14:44:35 +0300 Subject: [PATCH 062/354] rewriting (does not compile) --- src/dfx/src/lib/graph/traverse_filtered.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 76d3ed82b0..4a2663d5b0 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -15,18 +15,18 @@ impl DfsFiltered { } } - pub fn traverse(&mut self, graph: G, predicate: P, call: C) + pub fn traverse(&mut self, graph: G, mut predicate: P, mut call: C) where C: 
Fn(&NodeId, &NodeId) -> (), G: IntoNeighbors + DataMap, - P: Fn(&NodeId) -> bool, + P: FnMut(&NodeId) -> bool, NodeId: Copy + PartialEq, VM: VisitMap, { while let Some(item) = &self.base.next(graph) { - if predicate(item) { - let parent = self.base.stack.iter().rev().find(predicate); + if (&mut predicate)(item) { + let parent = self.base.stack.iter().map(|e| *e).rev().find(&mut predicate); if let Some(parent) = &parent { - call(parent, item); + (&mut call)(parent, item); } } } From 31ff714fee27433d307eafde41f8ebba0d83f8aa Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 14:52:13 +0300 Subject: [PATCH 063/354] it compiled (will test) --- src/dfx/src/lib/graph/traverse_filtered.rs | 2 +- src/dfx/src/lib/models/canister.rs | 17 +++++++++-------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 4a2663d5b0..2345f42b06 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -16,7 +16,7 @@ impl DfsFiltered { } pub fn traverse(&mut self, graph: G, mut predicate: P, mut call: C) - where C: Fn(&NodeId, &NodeId) -> (), + where C: FnMut(&NodeId, &NodeId) -> (), G: IntoNeighbors + DataMap, P: FnMut(&NodeId) -> bool, NodeId: Copy + PartialEq, diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 150d1b3db4..5c776b56e0 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -7,7 +7,7 @@ use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::metadata::dfx::DfxMetadata; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE, DFX}; -use crate::lib::graph::traverse_filtered::{self, DfsFiltered}; +use crate::lib::graph::traverse_filtered::DfsFiltered; use crate::lib::wasm::file::{compress_bytes, read_wasm_module}; use crate::util::assets; use anyhow::{anyhow, bail, 
Context}; @@ -582,14 +582,15 @@ impl CanisterPool { let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = - real_canisters_to_build.iter().map(|name| &MotokoImport::Canister(name.clone())).collect(); // `clone` is inefficient. - let start = start.into_iter().map(|node| *source_ids.get(node).unwrap()).collect(); + real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); // `clone` is inefficient. + let start = start.into_iter().map(|node| *source_ids.get(&node).unwrap()).collect(); // // Transform the graph of file dependencies to graph of canister dependencies. // // For this do DFS for each of `real_canisters_to_build`. let mut dest_graph: DiGraph = DiGraph::new(); let mut dest_id_set = HashMap::new(); let dfs = Dfs::from_parts(start, HashSet::new()); // TODO: Use `FixedBitSet` instead of `HashMap`? - let filtered_dfs = DfsFiltered::new(dfs); + let mut filtered_dfs = DfsFiltered::new(dfs); + // let dest_id_set = &mut dest_id_set; filtered_dfs.traverse( source_graph, |&s| { @@ -600,10 +601,10 @@ impl CanisterPool { false } }, - |parent_id, child_id| { - let parent_id = *dest_id_set.entry(*parent_id).or_insert_with(|| parent_id); - let child_id = *dest_id_set.entry(*child_id).or_insert_with(|| child_id); - dest_graph.add_edge(*parent_id, *child_id, ()); + |&parent_id, &child_id| { + let parent_id = *dest_id_set.entry(parent_id).or_insert_with(|| parent_id); + let child_id = *dest_id_set.entry(child_id).or_insert_with(|| child_id); + dest_graph.add_edge(parent_id, child_id, ()); } ); // let source_graph = &self.imports.borrow().graph; From 9cff499373209566c1d5d9c094f3bc87274403bd Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 15:26:14 +0300 Subject: [PATCH 064/354] it compiled (will test) --- src/dfx/src/lib/graph/traverse_filtered.rs | 12 ++++---- src/dfx/src/lib/models/canister.rs | 33 +++++++++++++++++++--- 2 files changed, 35 insertions(+), 10 
deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 2345f42b06..a34684e2ea 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -19,14 +19,14 @@ impl DfsFiltered { where C: FnMut(&NodeId, &NodeId) -> (), G: IntoNeighbors + DataMap, P: FnMut(&NodeId) -> bool, - NodeId: Copy + PartialEq, + NodeId: Copy + Eq, VM: VisitMap, { - while let Some(item) = &self.base.next(graph) { - if (&mut predicate)(item) { - let parent = self.base.stack.iter().map(|e| *e).rev().find(&mut predicate); - if let Some(parent) = &parent { - (&mut call)(parent, item); + while let Some(source_item_id) = &self.base.next(graph) { + if (&mut predicate)(source_item_id) { + let source_parent_id = self.base.stack.iter().map(|e| *e).rev().find(&mut predicate); + if let Some(source_parent_id) = &source_parent_id { + (&mut call)(source_parent_id, &source_item_id); } } } diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 5c776b56e0..cb598c7840 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -591,6 +591,7 @@ impl CanisterPool { let dfs = Dfs::from_parts(start, HashSet::new()); // TODO: Use `FixedBitSet` instead of `HashMap`? let mut filtered_dfs = DfsFiltered::new(dfs); // let dest_id_set = &mut dest_id_set; + let mut nodes_map = HashMap::new(); // from source graph to dest graph filtered_dfs.traverse( source_graph, |&s| { @@ -601,10 +602,34 @@ impl CanisterPool { false } }, - |&parent_id, &child_id| { - let parent_id = *dest_id_set.entry(parent_id).or_insert_with(|| parent_id); - let child_id = *dest_id_set.entry(child_id).or_insert_with(|| child_id); - dest_graph.add_edge(parent_id, child_id, ()); + |&source_parent_id, &source_child_id| { + // FIXME: Is the chain of `unwrap`s and `panic`s correct? 
+ let parent = source_graph.node_weight(source_parent_id).unwrap(); + let parent_name = match parent { + MotokoImport::Canister(name) => name, + _ => { + panic!("programming error"); + } + }; + let parent_canister = self.get_first_canister_with_name(&parent_name).unwrap().canister_id(); + + let child = source_graph.node_weight(source_child_id).unwrap(); + let child_name = match child { + MotokoImport::Canister(name) => name, + _ => { + panic!("programming error"); + } + }; + let child_canister = self.get_first_canister_with_name(&child_name).unwrap().canister_id(); + + let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); + nodes_map.insert(source_parent_id, dest_parent_id); + let dest_child_id = *dest_id_set.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); + nodes_map.insert(source_child_id, dest_child_id); + nodes_map.entry(source_parent_id).or_insert_with( + || dest_graph.add_node(*dest_graph.node_weight(source_parent_id).unwrap()) // FIXME: `unwrap()`? + ); + dest_graph.add_edge(dest_parent_id, dest_child_id, ()); } ); // let source_graph = &self.imports.borrow().graph; From 0dbb7e5678dd0fb6aae90b1c5cfec85833020723 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 15:37:31 +0300 Subject: [PATCH 065/354] bug fix: Dfs -> Bfs --- src/dfx/src/lib/graph/traverse_filtered.rs | 41 ++++++++-- src/dfx/src/lib/models/canister.rs | 93 +++++++++++----------- 2 files changed, 83 insertions(+), 51 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index a34684e2ea..9b1ab21f14 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,20 +1,51 @@ -// TODO: Somebody, adopt this code to `pethgraph`. -use petgraph::{data::DataMap, visit::{Dfs, IntoNeighbors, VisitMap}}; +// TODO: Somebody, adopt this code to `petgraph`. 
+use petgraph::{data::DataMap, visit::{Bfs, Dfs, IntoNeighbors, VisitMap}}; -pub struct DfsFiltered - // where P: FnMut(&N) -> bool -{ +#[allow(unused)] +pub struct DfsFiltered { base: Dfs, // node_filter: P, } impl DfsFiltered { + #[allow(unused)] pub fn new(base: Dfs) -> Self { Self { base } } + #[allow(unused)] + pub fn traverse(&mut self, graph: G, mut predicate: P, mut call: C) + where C: FnMut(&NodeId, &NodeId) -> (), + G: IntoNeighbors + DataMap, + P: FnMut(&NodeId) -> bool, + NodeId: Copy + Eq, + VM: VisitMap, + { + while let Some(source_item_id) = &self.base.next(graph) { + if (&mut predicate)(source_item_id) { + let source_parent_id = self.base.stack.iter().map(|e| *e).rev().find(&mut predicate); + if let Some(source_parent_id) = &source_parent_id { + (&mut call)(source_parent_id, &source_item_id); + } + } + } + } +} + +pub struct BfsFiltered { + base: Bfs, + // node_filter: P, +} + +impl BfsFiltered { + pub fn new(base: Bfs) -> Self { + Self { + base + } + } + pub fn traverse(&mut self, graph: G, mut predicate: P, mut call: C) where C: FnMut(&NodeId, &NodeId) -> (), G: IntoNeighbors + DataMap, diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index cb598c7840..71e2afdb42 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -5,9 +5,9 @@ use crate::lib::builders::{ use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; +use crate::lib::graph::traverse_filtered::BfsFiltered; use crate::lib::metadata::dfx::DfxMetadata; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE, DFX}; -use crate::lib::graph::traverse_filtered::DfsFiltered; use crate::lib::wasm::file::{compress_bytes, read_wasm_module}; use crate::util::assets; use anyhow::{anyhow, bail, Context}; @@ -22,7 +22,7 @@ use ic_wasm::metadata::{add_metadata, remove_metadata, Kind}; use ic_wasm::optimize::OptLevel; use 
itertools::Itertools; use petgraph::graph::{DiGraph, NodeIndex}; -use petgraph::visit::Dfs; +use petgraph::visit::Bfs; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; @@ -583,55 +583,56 @@ impl CanisterPool { let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); // `clone` is inefficient. - let start = start.into_iter().map(|node| *source_ids.get(&node).unwrap()).collect(); + let start = start.into_iter().map(|node| *source_ids.get(&node).unwrap()); // // Transform the graph of file dependencies to graph of canister dependencies. // // For this do DFS for each of `real_canisters_to_build`. let mut dest_graph: DiGraph = DiGraph::new(); let mut dest_id_set = HashMap::new(); - let dfs = Dfs::from_parts(start, HashSet::new()); // TODO: Use `FixedBitSet` instead of `HashMap`? - let mut filtered_dfs = DfsFiltered::new(dfs); - // let dest_id_set = &mut dest_id_set; - let mut nodes_map = HashMap::new(); // from source graph to dest graph - filtered_dfs.traverse( - source_graph, - |&s| { - let source_id = source_graph.node_weight(s); - if let Some(MotokoImport::Canister(_parent_name)) = source_id { - true - } else { - false - } - }, - |&source_parent_id, &source_child_id| { - // FIXME: Is the chain of `unwrap`s and `panic`s correct? 
- let parent = source_graph.node_weight(source_parent_id).unwrap(); - let parent_name = match parent { - MotokoImport::Canister(name) => name, - _ => { - panic!("programming error"); - } - }; - let parent_canister = self.get_first_canister_with_name(&parent_name).unwrap().canister_id(); - - let child = source_graph.node_weight(source_child_id).unwrap(); - let child_name = match child { - MotokoImport::Canister(name) => name, - _ => { - panic!("programming error"); + for start_node in start { + let bfs = Bfs::new(&source_graph, start_node); + let mut filtered_bfs = BfsFiltered::new(bfs); + let mut nodes_map = HashMap::new(); // from source graph to dest graph + filtered_bfs.traverse( + source_graph, + |&s| { + let source_id = source_graph.node_weight(s); + if let Some(MotokoImport::Canister(_parent_name)) = source_id { + true + } else { + false } - }; - let child_canister = self.get_first_canister_with_name(&child_name).unwrap().canister_id(); - - let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); - nodes_map.insert(source_parent_id, dest_parent_id); - let dest_child_id = *dest_id_set.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); - nodes_map.insert(source_child_id, dest_child_id); - nodes_map.entry(source_parent_id).or_insert_with( - || dest_graph.add_node(*dest_graph.node_weight(source_parent_id).unwrap()) // FIXME: `unwrap()`? - ); - dest_graph.add_edge(dest_parent_id, dest_child_id, ()); - } - ); + }, + |&source_parent_id, &source_child_id| { + // FIXME: Is the chain of `unwrap`s and `panic`s correct? 
+ let parent = source_graph.node_weight(source_parent_id).unwrap(); + let parent_name = match parent { + MotokoImport::Canister(name) => name, + _ => { + panic!("programming error"); + } + }; + let parent_canister = self.get_first_canister_with_name(&parent_name).unwrap().canister_id(); + + let child = source_graph.node_weight(source_child_id).unwrap(); + let child_name = match child { + MotokoImport::Canister(name) => name, + _ => { + panic!("programming error"); + } + }; + let child_canister = self.get_first_canister_with_name(&child_name).unwrap().canister_id(); + + let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); + nodes_map.insert(source_parent_id, dest_parent_id); + let dest_child_id = *dest_id_set.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); + nodes_map.insert(source_child_id, dest_child_id); + nodes_map.entry(source_parent_id).or_insert_with( + || dest_graph.add_node(*dest_graph.node_weight(source_parent_id).unwrap()) // FIXME: `unwrap()`? 
+ ); + dest_graph.add_edge(dest_parent_id, dest_child_id, ()); + } + ); + } // let source_graph = &self.imports.borrow().graph; // let mut dest_graph: DiGraph = DiGraph::new(); // let mut dest_id_set = HashMap::new(); From dab915e75e4023e0006b3d56b54a70fae5f19ef6 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 15:53:37 +0300 Subject: [PATCH 066/354] bug fix (untested) --- src/dfx/src/lib/models/canister.rs | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 71e2afdb42..4231d424ee 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -583,15 +583,25 @@ impl CanisterPool { let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); // `clone` is inefficient. - let start = start.into_iter().map(|node| *source_ids.get(&node).unwrap()); - // // Transform the graph of file dependencies to graph of canister dependencies. - // // For this do DFS for each of `real_canisters_to_build`. + let start: Vec<_> = start.into_iter().map(|node| *source_ids.get(&node).unwrap()).collect(); + // Transform the graph of file dependencies to graph of canister dependencies. + // For this do DFS for each of `real_canisters_to_build`. 
let mut dest_graph: DiGraph = DiGraph::new(); let mut dest_id_set = HashMap::new(); - for start_node in start { + for start_node in start.into_iter() { + // Initialize "mirrors" of the parent node of source graph in dest graph: + let parent = source_graph.node_weight(start_node).unwrap(); + let parent_name = match parent { + MotokoImport::Canister(name) => name, + _ => { + panic!("programming error"); + } + }; + let parent_canister = self.get_first_canister_with_name(&parent_name).unwrap().canister_id(); + let _ = *dest_id_set.entry(start_node).or_insert_with(|| dest_graph.add_node(parent_canister)); + let bfs = Bfs::new(&source_graph, start_node); let mut filtered_bfs = BfsFiltered::new(bfs); - let mut nodes_map = HashMap::new(); // from source graph to dest graph filtered_bfs.traverse( source_graph, |&s| { @@ -623,12 +633,7 @@ impl CanisterPool { let child_canister = self.get_first_canister_with_name(&child_name).unwrap().canister_id(); let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); - nodes_map.insert(source_parent_id, dest_parent_id); let dest_child_id = *dest_id_set.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); - nodes_map.insert(source_child_id, dest_child_id); - nodes_map.entry(source_parent_id).or_insert_with( - || dest_graph.add_node(*dest_graph.node_weight(source_parent_id).unwrap()) // FIXME: `unwrap()`? 
- ); dest_graph.add_edge(dest_parent_id, dest_child_id, ()); } ); From c52b31733485f8a5fe1ef273c087904b9e492751 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 15:56:24 +0300 Subject: [PATCH 067/354] removed tracing --- src/dfx/src/lib/models/canister.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 4231d424ee..ba9e7bd7eb 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -809,7 +809,6 @@ impl CanisterPool { trace!(log, "Building dependencies graph."); let graph = self.build_dependencies_graph(build_config.canisters_to_build.clone())?; // TODO: Can `clone` be eliminated? - println!("GRAPH: {:?}", graph); let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { Some(canister_id) => match self.get_canister_info(canister_id) { From e611bc2eb6d56c048eb0e939d38e5cd0a0548f32 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 15:56:58 +0300 Subject: [PATCH 068/354] removed tracing --- src/dfx/src/lib/models/canister.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index ba9e7bd7eb..70f0a0ab78 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -824,7 +824,6 @@ impl CanisterPool { .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. 
.map(|idx| *graph.node_weight(*idx).unwrap()) .collect(); - println!("ORDER: {:?}", order.iter().map(|c| c.to_text()).collect::>()); // let canisters_to_build = Bfs::new(graph, start); // let canisters_to_build = self.canisters_to_build(build_config); // FIXME From d7b6052bb9645d7d54b47494f30697a02021d0f6 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 16:15:14 +0300 Subject: [PATCH 069/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 2 +- src/dfx/src/lib/models/canister.rs | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index e7ae95ab15..f989bae47d 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -92,7 +92,7 @@ fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac } let parent_node_index = *imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); let child_node_index = *imports.nodes.entry(child.clone()).or_insert_with(|| imports.graph.add_node(child.clone())); - imports.graph.add_edge(parent_node_index, child_node_index, ()); + imports.graph.update_edge(parent_node_index, child_node_index, ()); } Ok(()) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 70f0a0ab78..2d94e30d28 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -634,7 +634,7 @@ impl CanisterPool { let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); let dest_child_id = *dest_id_set.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); - dest_graph.add_edge(dest_parent_id, dest_child_id, ()); + dest_graph.update_edge(dest_parent_id, dest_child_id, ()); } ); } @@ -824,6 +824,7 @@ impl CanisterPool { .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. 
.map(|idx| *graph.node_weight(*idx).unwrap()) .collect(); + println!("ORDER: {:?}", order.iter().map(|c| c.to_text()).collect::>()); // let canisters_to_build = Bfs::new(graph, start); // let canisters_to_build = self.canisters_to_build(build_config); // FIXME From 357a16ec9274f43c63125bf2d30ce43fd0597ab7 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 17:15:48 +0300 Subject: [PATCH 070/354] bug fix --- src/dfx/src/lib/graph/traverse_filtered.rs | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 9b1ab21f14..b0e21ca6d7 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,3 +1,5 @@ +use std::iter::once; + // TODO: Somebody, adopt this code to `petgraph`. use petgraph::{data::DataMap, visit::{Bfs, Dfs, IntoNeighbors, VisitMap}}; @@ -25,7 +27,7 @@ impl DfsFiltered { { while let Some(source_item_id) = &self.base.next(graph) { if (&mut predicate)(source_item_id) { - let source_parent_id = self.base.stack.iter().map(|e| *e).rev().find(&mut predicate); + let source_parent_id = self.base.stack.iter().map(|e| *e).rev().find(&mut predicate); // FIXME: `rev()` here? if let Some(source_parent_id) = &source_parent_id { (&mut call)(source_parent_id, &source_item_id); } @@ -50,14 +52,17 @@ impl BfsFiltered { where C: FnMut(&NodeId, &NodeId) -> (), G: IntoNeighbors + DataMap, P: FnMut(&NodeId) -> bool, - NodeId: Copy + Eq, + NodeId: Copy + Eq + std::fmt::Debug, // TODO: Remove debug. 
VM: VisitMap, { - while let Some(source_item_id) = &self.base.next(graph) { - if (&mut predicate)(source_item_id) { - let source_parent_id = self.base.stack.iter().map(|e| *e).rev().find(&mut predicate); - if let Some(source_parent_id) = &source_parent_id { - (&mut call)(source_parent_id, &source_item_id); + if let Some(first_id) = self.base.next(graph) { + while let Some(source_child_id) = &self.base.next(graph) { + if (&mut predicate)(source_child_id) { + let source_parent_id = self.base.stack.iter().map(|e| *e).chain(once(first_id)).find(&mut predicate); + if let Some(source_parent_id) = &source_parent_id { + println!("YYY: {:?} => {:?}", source_parent_id, &source_child_id); + (&mut call)(source_parent_id, &source_child_id); + } } } } From 3b60ab9e93ab1b2f544f0525bcde5f40dee3edce Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 17:16:02 +0300 Subject: [PATCH 071/354] removed unused import --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 2d94e30d28..b0678040ee 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -606,7 +606,7 @@ impl CanisterPool { source_graph, |&s| { let source_id = source_graph.node_weight(s); - if let Some(MotokoImport::Canister(_parent_name)) = source_id { + if let Some(MotokoImport::Canister(_)) = source_id { true } else { false From a26a647006889908346a48a039b5db243c0c442d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 17:17:59 +0300 Subject: [PATCH 072/354] comments and removed tracing --- src/dfx/src/lib/graph/traverse_filtered.rs | 2 +- src/dfx/src/lib/models/canister.rs | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index b0e21ca6d7..77ce07903d 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ 
b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -58,9 +58,9 @@ impl BfsFiltered { if let Some(first_id) = self.base.next(graph) { while let Some(source_child_id) = &self.base.next(graph) { if (&mut predicate)(source_child_id) { + // TODO: Create a `petgraph` issue asking to explain the next line of code workings. let source_parent_id = self.base.stack.iter().map(|e| *e).chain(once(first_id)).find(&mut predicate); if let Some(source_parent_id) = &source_parent_id { - println!("YYY: {:?} => {:?}", source_parent_id, &source_child_id); (&mut call)(source_parent_id, &source_child_id); } } diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index b0678040ee..f67b5732ee 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -824,7 +824,6 @@ impl CanisterPool { .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. .map(|idx| *graph.node_weight(*idx).unwrap()) .collect(); - println!("ORDER: {:?}", order.iter().map(|c| c.to_text()).collect::>()); // let canisters_to_build = Bfs::new(graph, start); // let canisters_to_build = self.canisters_to_build(build_config); // FIXME From 760ec44b493bb6d08eaca92308ac48c4e78cb461 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 17:23:44 +0300 Subject: [PATCH 073/354] removed comment --- src/dfx/src/lib/models/canister.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index f67b5732ee..40930b400b 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -825,8 +825,6 @@ impl CanisterPool { .map(|idx| *graph.node_weight(*idx).unwrap()) .collect(); - // let canisters_to_build = Bfs::new(graph, start); - // let canisters_to_build = self.canisters_to_build(build_config); // FIXME // TODO: The next line is slow and confusing code. 
let canisters_to_build: Vec<&Arc> = self.canisters.iter().filter(|c| order.contains(&c.canister_id())).collect(); let mut result = Vec::new(); From fc6fbc6d52612fcc31bfc1d68fb30f2c19ace335 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 17:44:34 +0300 Subject: [PATCH 074/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index f989bae47d..40f8b54113 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -200,8 +200,9 @@ impl CanisterBuilder for MotokoBuilder { if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let wasm_file_time = wasm_file_metadata.modified()?; let mut imports = pool.imports.borrow_mut(); - let start = if let Some(node_index) = imports.nodes.get(&MotokoImport::Relative(motoko_info.get_main_path().to_path_buf())) { - *node_index + // TODO: ineffective to_string() + let start = if let Some(node_index) = imports.nodes.get(&MotokoImport::Canister(canister_info.get_name().to_string())) { + *node_index } else { let node = MotokoImport::Relative(motoko_info.get_main_path().to_path_buf()); let node_index = imports.graph.add_node(node.clone()); From d8d41c0c9edf4c6bf5adb8b8e45e33841d3a4dca Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 18:05:16 +0300 Subject: [PATCH 075/354] bug fix (not tested) --- src/dfx/src/lib/builders/motoko.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 40f8b54113..8f67042eaf 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -59,13 +59,10 @@ fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac let parent = if let Some(top) = top { MotokoImport::Canister(top.get_name().to_string()) // a little inefficient } else { + 
println!("FILE: {}", file.to_path_buf().to_str().unwrap()); MotokoImport::Relative(file.to_path_buf()) }; - if imports.nodes.contains_key(&parent) { - return Ok(()); - } - let parent_node_index = *imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); - imports.nodes.insert(parent.clone(), parent_node_index); + imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); let mut command = cache.get_binary_command("moc")?; let command = command.arg("--print-deps").arg(file); @@ -200,10 +197,13 @@ impl CanisterBuilder for MotokoBuilder { if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let wasm_file_time = wasm_file_metadata.modified()?; let mut imports = pool.imports.borrow_mut(); + println!("NODES: {:?}", imports.nodes); + println!("NAME: {}", canister_info.get_name().to_string()); // TODO: ineffective to_string() let start = if let Some(node_index) = imports.nodes.get(&MotokoImport::Canister(canister_info.get_name().to_string())) { - *node_index + *node_index } else { + println!("XILE: {}", motoko_info.get_main_path().to_str().unwrap()); let node = MotokoImport::Relative(motoko_info.get_main_path().to_path_buf()); let node_index = imports.graph.add_node(node.clone()); imports.nodes.insert(node, node_index); From 7ab04e9ae7cd34e88885cfdcdbacaa4b33036889 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 18:07:08 +0300 Subject: [PATCH 076/354] removed tracing --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 8f67042eaf..980986f483 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -59,7 +59,6 @@ fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac let parent = if let Some(top) = top { MotokoImport::Canister(top.get_name().to_string()) // a little inefficient } else { - 
println!("FILE: {}", file.to_path_buf().to_str().unwrap()); MotokoImport::Relative(file.to_path_buf()) }; imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); From 030f9af902515edd70d60acde5d2cb547c200b40 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 18:16:18 +0300 Subject: [PATCH 077/354] removed tracing --- src/dfx/src/lib/builders/motoko.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 980986f483..14f53c4b49 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -196,8 +196,6 @@ impl CanisterBuilder for MotokoBuilder { if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let wasm_file_time = wasm_file_metadata.modified()?; let mut imports = pool.imports.borrow_mut(); - println!("NODES: {:?}", imports.nodes); - println!("NAME: {}", canister_info.get_name().to_string()); // TODO: ineffective to_string() let start = if let Some(node_index) = imports.nodes.get(&MotokoImport::Canister(canister_info.get_name().to_string())) { *node_index From bfdcfc29bf6c660a4f7768507b059505a236819f Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 19:51:17 +0300 Subject: [PATCH 078/354] removed commented out code --- src/dfx/src/lib/models/canister.rs | 44 ------------------------------ 1 file changed, 44 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 40930b400b..fa936d6c72 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -638,52 +638,8 @@ impl CanisterPool { } ); } - // let source_graph = &self.imports.borrow().graph; - // let mut dest_graph: DiGraph = DiGraph::new(); - // let mut dest_id_set = HashMap::new(); - // let mut name_to_dest = HashMap::new(); - // for start_name in real_canisters_to_build.iter() { - // let dest_start = 
self.get_first_canister_with_name(&start_name).unwrap().canister_id(); - // let dest_start = *dest_id_set.entry(dest_start.clone()).or_insert_with(|| dest_graph.add_node(dest_start.clone())); // TODO: always inserts - // name_to_dest.insert(start_name, dest_start); - // let mut iter = Dfs::new(&source_graph, dest_start); - // iter.next(&source_graph); - // while let Some(cur_source_id) = iter.next(&source_graph) { - // let cur_source_node = source_graph.node_weight(cur_source_id).unwrap(); - // if let MotokoImport::Canister(name) = cur_source_node { - // let parent_in_source_id = *iter.stack.iter().rev().find( - // |&entry| - // if let Some(MotokoImport::Canister(_parent_name)) = source_graph.node_weight(*entry) { - // true - // } else { - // false - // } - // ).unwrap(); - // // Both parent and current ancestor are `Canister` dependencies. - // let parent_in_dest_id = - // name_to_dest.entry(parent_in_source_id).or_insert_with(|| dest_graph.add_node(cur_canister_id)); - // dest_graph.add_edge(parent_in_dest_id, b, ()) - // // let parent_in_source = source_graph.node_weight(*parent_in_source).unwrap(); - // } - // // let cur_node_id = id_set.entry(cur_source_id).or_insert_with(|| id_set.insert(cur_source_id)); - // } - // } Ok(dest_graph) - // FIXME: Wrong behavior on indirect dependencies. 
- // Ok(self.imports.borrow().graph.filter_map( - // |_node_index, node_weight| { - // match node_weight { - // // FIXME: The "tops" of the digraph are `Relative()`, not `Canister()` - // // TODO: `get_first_canister_with_name` is a hack - // MotokoImport::Canister(name) => Some(self.get_first_canister_with_name(&name).unwrap().canister_id()), - // _ => None, - // } - // }, - // |_edge_index, _edge_weight| { - // Some(()) - // } - // )) } #[context("Failed step_prebuild_all.")] From d3a44e1e8221b168793dbdc2782809d82fa2e6be Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 21:15:52 +0300 Subject: [PATCH 079/354] comment --- src/dfx/src/lib/builders/motoko.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 14f53c4b49..cf00547cbb 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -132,6 +132,8 @@ impl CanisterBuilder for MotokoBuilder { } } + /// TODO: It supports Make-like dependencies for build, but not for "Post processing candid file"/"Shrinking WASM"/"Attaching metadata" + /// Ideally, should make inter-canister dependencies to rely on `.did` file changed or not. 
#[context("Failed to build Motoko canister '{}'.", canister_info.get_name())] fn build( &self, From 6819d531e5747fd722be23731150b46afaf2bb2b Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 23:17:26 +0300 Subject: [PATCH 080/354] bug fix (creates a new bug) --- src/dfx/src/lib/models/canister.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index fa936d6c72..8a374cdf45 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -2,6 +2,7 @@ use crate::lib::builders::{ custom_download, BuildConfig, BuildOutput, BuilderPool, CanisterBuilder, IdlBuildOutput, WasmBuildOutput, }; +use crate::lib::canister_info::motoko::MotokoCanisterInfo; use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; @@ -576,13 +577,22 @@ impl CanisterPool { let real_canisters_to_build = match canisters_to_build { Some(canisters_to_build) => canisters_to_build, - None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), + None => self.canisters.iter().filter_map( + |canister| if canister.get_info().as_info::().is_ok() { // TODO: Isn't this check too strong? We can depend on a Rust canister for instance. + Some(canister.get_name().to_string()) + } else { + None + }).collect(), }; // let real_canisters_to_build = real_canisters_to_build.iter().collect(); // hack let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; + println!("source_ids: {:?}", source_ids.keys()); + println!("real: {:?}", real_canisters_to_build); let start: Vec<_> = real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); // `clone` is inefficient. + // FIXME: Next line may fail on `dfx build -vv --all`. 
+ println!("start: {:?}", start); let start: Vec<_> = start.into_iter().map(|node| *source_ids.get(&node).unwrap()).collect(); // Transform the graph of file dependencies to graph of canister dependencies. // For this do DFS for each of `real_canisters_to_build`. From cadd9d90836cd74310f44133eb0a51821d408fc6 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 23:39:49 +0300 Subject: [PATCH 081/354] bug fix (not tested) --- src/dfx/src/lib/models/canister.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 8a374cdf45..2979f1b22c 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -575,16 +575,21 @@ impl CanisterPool { } } - let real_canisters_to_build = match canisters_to_build { - Some(canisters_to_build) => canisters_to_build, + println!("cans: {:?}", canisters_to_build); + println!("selfcans: {:?}", self.canisters.iter().map(|canister| canister.get_name().to_string()).collect::>()); + let real_canisters_to_build: Vec<_> = match canisters_to_build { + // TODO: (In below branches) isn't this check too strong? We can depend on a Rust canister for instance. + Some(canisters_to_build) => + canisters_to_build.into_iter().filter( + |name| self.get_first_canister_with_name(&name).unwrap().get_info().as_info::().is_ok() + ).collect(), None => self.canisters.iter().filter_map( - |canister| if canister.get_info().as_info::().is_ok() { // TODO: Isn't this check too strong? We can depend on a Rust canister for instance. 
+ |canister| if canister.get_info().as_info::().is_ok() { Some(canister.get_name().to_string()) } else { None }).collect(), }; - // let real_canisters_to_build = real_canisters_to_build.iter().collect(); // hack let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; println!("source_ids: {:?}", source_ids.keys()); From 0e13b8b6eadcbfd0b04687ec1e4aaf16df8f7fe9 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 23:44:20 +0300 Subject: [PATCH 082/354] removed tracing --- src/dfx/src/lib/models/canister.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 2979f1b22c..255191b8b5 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -575,8 +575,6 @@ impl CanisterPool { } } - println!("cans: {:?}", canisters_to_build); - println!("selfcans: {:?}", self.canisters.iter().map(|canister| canister.get_name().to_string()).collect::>()); let real_canisters_to_build: Vec<_> = match canisters_to_build { // TODO: (In below branches) isn't this check too strong? We can depend on a Rust canister for instance. Some(canisters_to_build) => @@ -592,8 +590,6 @@ impl CanisterPool { }; let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; - println!("source_ids: {:?}", source_ids.keys()); - println!("real: {:?}", real_canisters_to_build); let start: Vec<_> = real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); // `clone` is inefficient. // FIXME: Next line may fail on `dfx build -vv --all`. 
From 3b6ca0f01b660e3e6e9530b3a70cc0287109c0c7 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 16 Apr 2024 23:49:22 +0300 Subject: [PATCH 083/354] removed tracing and comment --- src/dfx/src/lib/models/canister.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 255191b8b5..2099d1c1e4 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -592,8 +592,6 @@ impl CanisterPool { let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); // `clone` is inefficient. - // FIXME: Next line may fail on `dfx build -vv --all`. - println!("start: {:?}", start); let start: Vec<_> = start.into_iter().map(|node| *source_ids.get(&node).unwrap()).collect(); // Transform the graph of file dependencies to graph of canister dependencies. // For this do DFS for each of `real_canisters_to_build`. From 9c4b04f04bfa206de6ca4c6e360ed91bafb74b44 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 17 Apr 2024 01:50:41 +0300 Subject: [PATCH 084/354] TODO comment --- src/dfx/src/lib/builders/motoko.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index cf00547cbb..6621302533 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -45,6 +45,7 @@ impl MotokoBuilder { // TODO: Rename this function. // TODO: Is `unwrap()` in the next line correct? +// TODO: We don't need library dependencies, because updated lib is always in a new dir. Speedup removing library dependencies. 
#[context("Failed to find imports for canister at '{}'.", info.as_info::().unwrap().get_main_path().display())] fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { let motoko_info = info.as_info::()?; From f6ac336e89d6384c733d249331965b830d609f68 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 17 Apr 2024 15:55:33 +0300 Subject: [PATCH 085/354] misc --- src/dfx/src/lib/builders/motoko.rs | 53 ++++++++++++++++-------------- src/dfx/src/lib/models/canister.rs | 2 +- 2 files changed, 29 insertions(+), 26 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 6621302533..a958702f21 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -9,8 +9,7 @@ use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; use crate::lib::models::canister::{CanisterPool, ImportsTracker, MotokoImport}; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; -use crate::lib::builders::bail; -use anyhow::{anyhow, Context}; +use anyhow::Context; use candid::Principal as CanisterId; use dfx_core::config::cache::Cache; use dfx_core::config::model::dfinity::{MetadataVisibility, Profile}; @@ -65,6 +64,7 @@ fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); let mut command = cache.get_binary_command("moc")?; + println!("FILE: {}", file.as_os_str().to_str().unwrap()); let command = command.arg("--print-deps").arg(file); let output = command .output() @@ -72,6 +72,7 @@ fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac let output = String::from_utf8_lossy(&output.stdout); for line in output.lines() { + println!("LINE: {}", line); let child = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; match &child { 
MotokoImport::Relative(path) => { @@ -240,29 +241,31 @@ impl CanisterBuilder for MotokoBuilder { None } } - MotokoImport::Lib(path) => { - let i = path.find('/'); - let pre_path = if let Some(i) = i { - let expanded = Path::new( - package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? - ); - expanded.join(&path[i+1..]) - } else { - Path::new(path.as_str()).to_owned() - }; - let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; - let path2 = path2.to_string(); - let path2 = Path::new(&path2); - if path2.exists() { // TODO: Is it correct order of two variants? - Some(Path::new(path2).to_owned()) - } else { - let path3 = pre_path.join(Path::new("lib.mo")); - if path3.exists() { - Some(path3.to_owned()) - } else { - bail!("source file has been deleted"); - } - } + MotokoImport::Lib(_path) => { + // Skip libs, all changes by package managers don't modify existing directories but create new ones. + continue; + // let i = path.find('/'); + // let pre_path = if let Some(i) = i { + // let expanded = Path::new( + // package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? + // ); + // expanded.join(&path[i+1..]) + // } else { + // Path::new(path.as_str()).to_owned() + // }; + // let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; + // let path2 = path2.to_string(); + // let path2 = Path::new(&path2); + // if path2.exists() { // TODO: Is it correct order of two variants? 
+ // Some(Path::new(path2).to_owned()) + // } else { + // let path3 = pre_path.join(Path::new("lib.mo")); + // if path3.exists() { + // Some(path3.to_owned()) + // } else { + // bail!("source file has been deleted"); + // } + // } } MotokoImport::Relative(path) => { Some(Path::new(&path).to_owned()) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 2099d1c1e4..c837bee3a9 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -443,7 +443,7 @@ fn check_valid_subtype(compiled_idl_path: &Path, specified_idl_path: &Path) -> D pub enum MotokoImport { Canister(String), Ic(String), - Lib(String), + Lib(String), // TODO: Unused, because package manager never update existing files (but create new dirs) Relative(PathBuf), } From 453e6a6e1847ac1a0b3b51d7027019344bb1f3f6 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 17 Apr 2024 15:55:49 +0300 Subject: [PATCH 086/354] removed extensive tracing --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index a958702f21..5011571d96 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -72,7 +72,6 @@ fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac let output = String::from_utf8_lossy(&output.stdout); for line in output.lines() { - println!("LINE: {}", line); let child = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; match &child { MotokoImport::Relative(path) => { From a503e563ae3268ec5bd3dc62519449e2395bd43b Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 17 Apr 2024 16:02:53 +0300 Subject: [PATCH 087/354] speedup by caching dependencies data --- src/dfx/src/lib/builders/motoko.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 
5011571d96..ef1b3892ad 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -61,10 +61,13 @@ fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac } else { MotokoImport::Relative(file.to_path_buf()) }; - imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); + if let Some(_) = imports.nodes.get(&parent) { // The item is already in the graph. + return Ok(()); + } else { + imports.nodes.insert(parent.clone(), imports.graph.add_node(parent.clone()),); + } let mut command = cache.get_binary_command("moc")?; - println!("FILE: {}", file.as_os_str().to_str().unwrap()); let command = command.arg("--print-deps").arg(file); let output = command .output() From 7b5fe36bb23171d8e1bfff0ab263c3a2b5082d46 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 01:28:10 +0300 Subject: [PATCH 088/354] option not to deploy a canister --- src/dfx-core/src/config/model/dfinity.rs | 11 ++++++++++- .../src/lib/operations/canister/deploy_canisters.rs | 9 ++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/src/dfx-core/src/config/model/dfinity.rs b/src/dfx-core/src/config/model/dfinity.rs index 0567ef7dfc..7eabcc36eb 100644 --- a/src/dfx-core/src/config/model/dfinity.rs +++ b/src/dfx-core/src/config/model/dfinity.rs @@ -251,6 +251,11 @@ pub struct ConfigCanistersCanister { #[serde(default)] pub dependencies: Vec, + /// # Deploy + /// `false` value means not to deploy this canister (supposed use: canister that are created by other canisters). + #[serde(default = "default_true")] + pub deploy: bool, + /// # Force Frontend URL /// Mostly unused. /// If this value is not null, a frontend URL is displayed after deployment even if the canister type is not 'asset'. 
@@ -316,6 +321,10 @@ pub struct ConfigCanistersCanister { pub init_arg: Option, } +fn default_true() -> bool { + true +} + #[derive(Clone, Debug, Serialize, JsonSchema)] #[serde(tag = "type", rename_all = "snake_case")] pub enum CanisterTypeProperties { @@ -929,7 +938,7 @@ impl ConfigInterface { .reserved_cycles_limit) } - fn get_canister_config( + pub fn get_canister_config( &self, canister_name: &str, ) -> Result<&ConfigCanistersCanister, GetCanisterConfigError> { diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 9624ec40a9..0a96aa0f06 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -100,7 +100,14 @@ pub async fn deploy_canisters( let canisters_to_install: Vec = canisters_to_build .clone() .into_iter() - .filter(|canister_name| !pull_canisters_in_config.contains_key(canister_name)) + .filter(|canister_name| + !pull_canisters_in_config.contains_key(canister_name) && + // TODO: This if..else is a hack. 
+ if let Ok(canister_config) = config.get_config().get_canister_config(canister_name) { + canister_config.deploy + } else { + true + }) .collect(); if some_canister.is_some() { From ca5de3f5829f8769d29eedc489a4c4ed6cf6e699 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 01:44:32 +0300 Subject: [PATCH 089/354] fixes --- src/dfx-core/src/config/model/dfinity.rs | 2 +- .../src/lib/operations/canister/deploy_canisters.rs | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/dfx-core/src/config/model/dfinity.rs b/src/dfx-core/src/config/model/dfinity.rs index 7eabcc36eb..19dfae0fc8 100644 --- a/src/dfx-core/src/config/model/dfinity.rs +++ b/src/dfx-core/src/config/model/dfinity.rs @@ -252,7 +252,7 @@ pub struct ConfigCanistersCanister { pub dependencies: Vec, /// # Deploy - /// `false` value means not to deploy this canister (supposed use: canister that are created by other canisters). + /// `false` value means not to deploy this canister by `dfx deploy` (supposed use: canister that are created by other canisters). #[serde(default = "default_true")] pub deploy: bool, diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 0a96aa0f06..5ddf9aac27 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -103,11 +103,12 @@ pub async fn deploy_canisters( .filter(|canister_name| !pull_canisters_in_config.contains_key(canister_name) && // TODO: This if..else is a hack. 
- if let Ok(canister_config) = config.get_config().get_canister_config(canister_name) { - canister_config.deploy - } else { - true - }) + (some_canister.is_some() || // do deploy a canister that was explicitly specified + if let Ok(canister_config) = config.get_config().get_canister_config(canister_name) { + canister_config.deploy + } else { + true + })) .collect(); if some_canister.is_some() { From 01a851ed34ccf76232117398c65896f1ba094029 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 01:50:24 +0300 Subject: [PATCH 090/354] bug fix --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 5ddf9aac27..a36fb3fe09 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -103,7 +103,7 @@ pub async fn deploy_canisters( .filter(|canister_name| !pull_canisters_in_config.contains_key(canister_name) && // TODO: This if..else is a hack. 
- (some_canister.is_some() || // do deploy a canister that was explicitly specified + (some_canister.map_or_else(|| None, |canister| Some(canister)) == Some(&canister_name) || // do deploy a canister that was explicitly specified if let Ok(canister_config) = config.get_config().get_canister_config(canister_name) { canister_config.deploy } else { From 32923d6471eb7fc91bc4c2f15ec917ea343c5075 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 01:51:39 +0300 Subject: [PATCH 091/354] refactor --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index a36fb3fe09..343baae2f1 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -103,7 +103,7 @@ pub async fn deploy_canisters( .filter(|canister_name| !pull_canisters_in_config.contains_key(canister_name) && // TODO: This if..else is a hack. 
- (some_canister.map_or_else(|| None, |canister| Some(canister)) == Some(&canister_name) || // do deploy a canister that was explicitly specified + (some_canister == Some(&canister_name) || // do deploy a canister that was explicitly specified if let Ok(canister_config) = config.get_config().get_canister_config(canister_name) { canister_config.deploy } else { From 0fdc33419185cb8eec2a0fffd99ee5d7a35436bc Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 01:52:21 +0300 Subject: [PATCH 092/354] comment --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 343baae2f1..899700c26b 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -102,8 +102,8 @@ pub async fn deploy_canisters( .into_iter() .filter(|canister_name| !pull_canisters_in_config.contains_key(canister_name) && - // TODO: This if..else is a hack. (some_canister == Some(&canister_name) || // do deploy a canister that was explicitly specified + // TODO: This if..else is a hack. 
if let Ok(canister_config) = config.get_config().get_canister_config(canister_name) { canister_config.deploy } else { From 410c72511872fc9674feff201919404efe9ed7cd Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 01:54:11 +0300 Subject: [PATCH 093/354] better help --- src/dfx-core/src/config/model/dfinity.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx-core/src/config/model/dfinity.rs b/src/dfx-core/src/config/model/dfinity.rs index 19dfae0fc8..9d4bf28aac 100644 --- a/src/dfx-core/src/config/model/dfinity.rs +++ b/src/dfx-core/src/config/model/dfinity.rs @@ -252,7 +252,7 @@ pub struct ConfigCanistersCanister { pub dependencies: Vec, /// # Deploy - /// `false` value means not to deploy this canister by `dfx deploy` (supposed use: canister that are created by other canisters). + /// `false` value means not to deploy this canister unless it's explicitly specified in the command line (supposed use: canister that are created by other canisters). #[serde(default = "default_true")] pub deploy: bool, From ec0a461b3f6c73dc3c66ff8cbd16180d0abaa6d3 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 02:13:22 +0300 Subject: [PATCH 094/354] CHANGELOG --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7743e10a5a..f2c145d6de 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ # UNRELEASED +### feat: specify canisters not to deploy + +`"deploy": false` canister option makes it not to deploy, unless explicitly specified on the command line. 
+ # 0.20.0 ### fix: set `CANISTER_CANDID_PATH_` properly for remote canisters From c8955c4f237e0cd87913becd334b65ebff66495d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 02:14:36 +0300 Subject: [PATCH 095/354] docs --- docs/cli-reference/dfx-deploy.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/cli-reference/dfx-deploy.mdx b/docs/cli-reference/dfx-deploy.mdx index 72573176c5..6853c57785 100644 --- a/docs/cli-reference/dfx-deploy.mdx +++ b/docs/cli-reference/dfx-deploy.mdx @@ -4,7 +4,7 @@ import { MarkdownChipRow } from "/src/components/Chip/MarkdownChipRow"; -Use the `dfx deploy` command to register, build, and deploy a dapp on the local canister execution environment, on the IC or on a specified testnet. By default, all canisters defined in the project `dfx.json` configuration file are deployed. +Use the `dfx deploy` command to register, build, and deploy a dapp on the local canister execution environment, on the IC or on a specified testnet. By default, all canisters defined in the project `dfx.json` configuration file are deployed, except of the canisters with `"deploy": false` option. 
This command simplifies the developer workflow by enabling you to run one command instead of running the following commands as separate steps: From 52003bfffd21d50eabd4bf20b0d756a747dbb5d1 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 02:18:18 +0300 Subject: [PATCH 096/354] update dfx.json.schema --- docs/dfx-json-schema.json | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/dfx-json-schema.json b/docs/dfx-json-schema.json index e5899254bb..48eb18a8b8 100644 --- a/docs/dfx-json-schema.json +++ b/docs/dfx-json-schema.json @@ -355,6 +355,12 @@ "type": "string" } }, + "deploy": { + "title": "Deploy", + "description": "`false` value means not to deploy this canister unless it's explicitly specified in the command line (supposed use: canister that are created by other canisters).", + "default": true, + "type": "boolean" + }, "frontend": { "title": "Force Frontend URL", "description": "Mostly unused. If this value is not null, a frontend URL is displayed after deployment even if the canister type is not 'asset'.", From 16ad0db13a1ded7ce9e40e85fc16cfdbd9770390 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 02:29:43 +0300 Subject: [PATCH 097/354] cargo clippy test passed --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 899700c26b..630a371f0a 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -102,7 +102,7 @@ pub async fn deploy_canisters( .into_iter() .filter(|canister_name| !pull_canisters_in_config.contains_key(canister_name) && - (some_canister == Some(&canister_name) || // do deploy a canister that was explicitly specified + (some_canister == Some(canister_name) || // do deploy a canister that was explicitly specified // TODO: This if..else 
is a hack. if let Ok(canister_config) = config.get_config().get_canister_config(canister_name) { canister_config.deploy From ef7eec941c21571d524dec12f12b6109e780ce4c Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 02:47:23 +0300 Subject: [PATCH 098/354] fefactor --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 630a371f0a..21060c8cf3 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -103,12 +103,9 @@ pub async fn deploy_canisters( .filter(|canister_name| !pull_canisters_in_config.contains_key(canister_name) && (some_canister == Some(canister_name) || // do deploy a canister that was explicitly specified - // TODO: This if..else is a hack. - if let Ok(canister_config) = config.get_config().get_canister_config(canister_name) { - canister_config.deploy - } else { - true - })) + // TODO: This is a hack. 
+ config.get_config().get_canister_config(canister_name).map_or( + true, |canister_config| canister_config.deploy))) .collect(); if some_canister.is_some() { From 96f6f745397e66507856ca48a136ba886d50028b Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 03:36:17 +0300 Subject: [PATCH 099/354] removed tracing --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index ef1b3892ad..f8ddc149c0 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -206,7 +206,6 @@ impl CanisterBuilder for MotokoBuilder { let start = if let Some(node_index) = imports.nodes.get(&MotokoImport::Canister(canister_info.get_name().to_string())) { *node_index } else { - println!("XILE: {}", motoko_info.get_main_path().to_str().unwrap()); let node = MotokoImport::Relative(motoko_info.get_main_path().to_path_buf()); let node_index = imports.graph.add_node(node.clone()); imports.nodes.insert(node, node_index); From 7615e817f9e68fad08b723d3531f17c03f491026 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 04:34:36 +0300 Subject: [PATCH 100/354] function renamed, comment added --- src/dfx/src/lib/builders/motoko.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index f8ddc149c0..b6b00df6f5 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -42,11 +42,10 @@ impl MotokoBuilder { } } -// TODO: Rename this function. // TODO: Is `unwrap()` in the next line correct? -// TODO: We don't need library dependencies, because updated lib is always in a new dir. Speedup removing library dependencies. +/// Add imports originating from canister `info` to the graph `imports` of dependencies. 
#[context("Failed to find imports for canister at '{}'.", info.as_info::().unwrap().get_main_path().display())] -fn get_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { +fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { let motoko_info = info.as_info::()?; #[context("Failed recursive dependency detection at {}.", file.display())] fn get_imports_recursive ( @@ -110,7 +109,7 @@ impl CanisterBuilder for MotokoBuilder { pool: &CanisterPool, info: &CanisterInfo, ) -> DfxResult> { - get_imports(self.cache.as_ref(), info, &mut *pool.imports.borrow_mut(), pool)?; + add_imports(self.cache.as_ref(), info, &mut *pool.imports.borrow_mut(), pool)?; let graph = &pool.imports.borrow().graph; match petgraph::algo::toposort(&pool.imports.borrow().graph, None) { @@ -174,7 +173,7 @@ impl CanisterBuilder for MotokoBuilder { std::fs::create_dir_all(idl_dir_path) .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; - get_imports(cache.as_ref(), canister_info, &mut *pool.imports.borrow_mut(), pool)?; + add_imports(cache.as_ref(), canister_info, &mut *pool.imports.borrow_mut(), pool)?; // If the management canister is being imported, emit the candid file. if pool.imports.borrow().nodes.contains_key(&MotokoImport::Ic("aaaaa-aa".to_string())) From eb65ab4fa44f7e15a4dd9b38dbe398f1225d13d5 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 04:35:27 +0300 Subject: [PATCH 101/354] removed TODO comment --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index b6b00df6f5..ace6d1454d 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -42,7 +42,6 @@ impl MotokoBuilder { } } -// TODO: Is `unwrap()` in the next line correct? 
/// Add imports originating from canister `info` to the graph `imports` of dependencies. #[context("Failed to find imports for canister at '{}'.", info.as_info::().unwrap().get_main_path().display())] fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { From 768f78698cb30835a823e7593c95faed33597e5d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 04:37:54 +0300 Subject: [PATCH 102/354] refactoring --- src/dfx/src/lib/builders/motoko.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index ace6d1454d..e8793856ac 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -121,10 +121,10 @@ impl CanisterBuilder for MotokoBuilder { Err(err) => { let message = match graph.node_weight(err.node_id()) { Some(canister_id) => match canister_id { - MotokoImport::Canister(name) => name.clone(), // TODO: Can deal without `clone()`? - _ => "".to_string(), + MotokoImport::Canister(name) => &name, + _ => "", }, - None => "".to_string(), + None => "", }; return Err(DfxError::new(BuildError::DependencyError(format!( "Found circular dependency: {}", From 6c13c38bf765d9d33eb5f1bd133c9754d2957b60 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 04:39:35 +0300 Subject: [PATCH 103/354] refactor --- src/dfx/src/lib/builders/motoko.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index e8793856ac..e03af28ce2 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -183,12 +183,12 @@ impl CanisterBuilder for MotokoBuilder { let package_arguments = package_arguments::load(cache.as_ref(), motoko_info.get_packtool())?; - let mut package_arguments_map = BTreeMap::::new(); // TODO: Can we deal without cloning strings? 
+ let mut package_arguments_map = BTreeMap::<&str, &str>::new(); { // block let mut i = 0; while i + 3 <= package_arguments.len() { if package_arguments[i] == "--package" { - package_arguments_map.insert(package_arguments[i+1].clone(), package_arguments[i+2].clone()); + package_arguments_map.insert(&package_arguments[i+1], &package_arguments[i+2]); i += 3; } else { i += 1; From c02bb432b7dab916c86401c5013cc87f0300780f Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 04:40:38 +0300 Subject: [PATCH 104/354] removed hard-to-do TODO comment --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index e03af28ce2..e4c7b98ab9 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -200,7 +200,6 @@ impl CanisterBuilder for MotokoBuilder { if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let wasm_file_time = wasm_file_metadata.modified()?; let mut imports = pool.imports.borrow_mut(); - // TODO: ineffective to_string() let start = if let Some(node_index) = imports.nodes.get(&MotokoImport::Canister(canister_info.get_name().to_string())) { *node_index } else { From 970d7bd577889cf6ecda02efb335b6c1488b4613 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 04:48:59 +0300 Subject: [PATCH 105/354] more logging --- src/dfx/src/lib/builders/motoko.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index e4c7b98ab9..60e31add18 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -277,7 +277,7 @@ impl CanisterBuilder for MotokoBuilder { }; }; } else { - // println!("Canister {} already compiled.", canister_info.get_name()); // TODO + trace!(self.logger, "Canister {} already compiled.", canister_info.get_name()); return Ok(BuildOutput { // duplicate code canister_id: 
canister_info .get_canister_id() From a66b2a58d8acd2b7a3e6ec167a49050ea50d77c4 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 04:50:25 +0300 Subject: [PATCH 106/354] removed superfluous Debug --- src/dfx/src/lib/graph/traverse_filtered.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 77ce07903d..15d5b224dd 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -52,7 +52,7 @@ impl BfsFiltered { where C: FnMut(&NodeId, &NodeId) -> (), G: IntoNeighbors + DataMap, P: FnMut(&NodeId) -> bool, - NodeId: Copy + Eq + std::fmt::Debug, // TODO: Remove debug. + NodeId: Copy + Eq, VM: VisitMap, { if let Some(first_id) = self.base.next(graph) { From f6c0849f73e28e19084a0b53f82705d1ef2be811 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 05:05:35 +0300 Subject: [PATCH 107/354] removed unneeded code --- src/dfx/src/lib/graph/traverse_filtered.rs | 37 ++-------------------- 1 file changed, 2 insertions(+), 35 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 15d5b224dd..a467f8ecf4 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,40 +1,7 @@ use std::iter::once; -// TODO: Somebody, adopt this code to `petgraph`. 
-use petgraph::{data::DataMap, visit::{Bfs, Dfs, IntoNeighbors, VisitMap}}; - -#[allow(unused)] -pub struct DfsFiltered { - base: Dfs, - // node_filter: P, -} - -impl DfsFiltered { - #[allow(unused)] - pub fn new(base: Dfs) -> Self { - Self { - base - } - } - - #[allow(unused)] - pub fn traverse(&mut self, graph: G, mut predicate: P, mut call: C) - where C: FnMut(&NodeId, &NodeId) -> (), - G: IntoNeighbors + DataMap, - P: FnMut(&NodeId) -> bool, - NodeId: Copy + Eq, - VM: VisitMap, - { - while let Some(source_item_id) = &self.base.next(graph) { - if (&mut predicate)(source_item_id) { - let source_parent_id = self.base.stack.iter().map(|e| *e).rev().find(&mut predicate); // FIXME: `rev()` here? - if let Some(source_parent_id) = &source_parent_id { - (&mut call)(source_parent_id, &source_item_id); - } - } - } - } -} +// TODO: Somebody, adopt this code (and DFS) to `petgraph`. +use petgraph::{data::DataMap, visit::{Bfs, IntoNeighbors, VisitMap}}; pub struct BfsFiltered { base: Bfs, From d01e08e7209781cca951ecbfce6c69ceea19b31e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 05:07:35 +0300 Subject: [PATCH 108/354] removed hard-to-fix TODO item --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index c837bee3a9..35ac644ff2 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -562,7 +562,7 @@ impl CanisterPool { // println!("canisters_to_build: {:?}", canisters_to_build); for canister in &self.canisters { // a little inefficient let contains = if let Some(canisters_to_build) = &canisters_to_build { - canisters_to_build.iter().contains(&canister.get_info().get_name().to_string()) // TODO: a little slow + canisters_to_build.iter().contains(&canister.get_info().get_name().to_string()) } else { true // because user specified to build all canisters }; From 
f7b1d5de903c85dbb973886f9e9cd963e0cc2719 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 05:09:58 +0300 Subject: [PATCH 109/354] removed unused code --- src/dfx/src/lib/models/canister.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 35ac644ff2..0dfd05e86a 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -570,8 +570,7 @@ impl CanisterPool { let canister_info = &canister.info; // FIXME: Is `unwrap()` in the next operator correct? // TODO: Ignored return value is a hack. - let _deps: Vec = canister.builder.get_dependencies(self, canister_info)? - .into_iter().filter(|d| *d != canister_info.get_canister_id().unwrap()).collect(); // TODO: This is a hack. + let _deps: Vec = canister.builder.get_dependencies(self, canister_info)?; } } From a02975ff5e9f15c352959e8b95e18e0cb1ee0312 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 05:18:01 +0300 Subject: [PATCH 110/354] removed an outdated FIXME comment --- src/dfx/src/lib/models/canister.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 0dfd05e86a..cb2ec289e4 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -568,7 +568,6 @@ impl CanisterPool { }; if contains { let canister_info = &canister.info; - // FIXME: Is `unwrap()` in the next operator correct? // TODO: Ignored return value is a hack. 
let _deps: Vec = canister.builder.get_dependencies(self, canister_info)?; } From 75692f5af17e9b68d726c05829dd85823258f5ad Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 05:18:43 +0300 Subject: [PATCH 111/354] removed a wrong FIXME comment --- src/dfx/src/lib/models/canister.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index cb2ec289e4..2229b43401 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -920,7 +920,6 @@ impl CanisterPool { Ok(()) } - // FIXME: Is this function miused? pub fn canisters_to_build(&self, build_config: &BuildConfig) -> Vec<&Arc> { if let Some(canister_names) = &build_config.canisters_to_build { self.canisters From 965849e048300b7101401796640d564ebc2f6601 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 05:25:09 +0300 Subject: [PATCH 112/354] code formatting --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 21060c8cf3..78567b46cb 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -100,12 +100,13 @@ pub async fn deploy_canisters( let canisters_to_install: Vec = canisters_to_build .clone() .into_iter() - .filter(|canister_name| - !pull_canisters_in_config.contains_key(canister_name) && - (some_canister == Some(canister_name) || // do deploy a canister that was explicitly specified + .filter(|canister_name| { + !pull_canisters_in_config.contains_key(canister_name) + && (some_canister == Some(canister_name) || // do deploy a canister that was explicitly specified // TODO: This is a hack. 
config.get_config().get_canister_config(canister_name).map_or( - true, |canister_config| canister_config.deploy))) + true, |canister_config| canister_config.deploy)) + }) .collect(); if some_canister.is_some() { From e819c7f5955325b52159bbf9cea26429b40278f3 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 17:26:48 +0300 Subject: [PATCH 113/354] function renamed --- src/dfx/src/lib/builders/motoko.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 60e31add18..81b295db15 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -47,7 +47,7 @@ impl MotokoBuilder { fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { let motoko_info = info.as_info::()?; #[context("Failed recursive dependency detection at {}.", file.display())] - fn get_imports_recursive ( + fn add_imports_recursive ( cache: &dyn Cache, file: &Path, imports: &mut ImportsTracker, @@ -76,13 +76,13 @@ fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac let child = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; match &child { MotokoImport::Relative(path) => { - get_imports_recursive(cache, path.as_path(), imports, pool, None)?; + add_imports_recursive(cache, path.as_path(), imports, pool, None)?; } MotokoImport::Canister(canister_name) => { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { - get_imports_recursive(cache, Path::new(main_file), imports, pool, None)?; + add_imports_recursive(cache, Path::new(main_file), imports, pool, None)?; } } } @@ -96,7 +96,7 @@ fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac Ok(()) } - get_imports_recursive(cache, 
motoko_info.get_main_path(), imports, pool, Some(info))?; + add_imports_recursive(cache, motoko_info.get_main_path(), imports, pool, Some(info))?; Ok(()) } From 382bef4b16f6437c2a28cbebb744f4b351442e54 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 18 Apr 2024 18:48:37 +0300 Subject: [PATCH 114/354] CHANGELOG correction --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index af8c3759b3..7da44b45c3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ ### feat: rebuild only necessary canisters -Cache `get_imports()` results. +Cache `get_imports()` (renamed to `add_imports()`) results. Read only those `--print-deps` dependencies that are necessary to read. From 25b2d4de89610ede53e351f5fc4cc81ee7c484a1 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 20 Apr 2024 23:22:08 +0300 Subject: [PATCH 115/354] removed FIXME comment --- src/dfx/src/lib/canister_info/motoko.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/canister_info/motoko.rs b/src/dfx/src/lib/canister_info/motoko.rs index 28f3949dbb..e540328fa3 100644 --- a/src/dfx/src/lib/canister_info/motoko.rs +++ b/src/dfx/src/lib/canister_info/motoko.rs @@ -54,7 +54,7 @@ impl MotokoCanisterInfo { impl CanisterInfoFactory for MotokoCanisterInfo { fn create(info: &CanisterInfo) -> DfxResult { - // let workspace_root = info.get_workspace_root(); // FIXME: Is it correct that I commented it out? // I commented it out to have consistent relative paths. + // let workspace_root = info.get_workspace_root(); // I commented it out to have consistent relative paths. let name = info.get_name(); ensure!( matches!(info.type_specific, CanisterTypeProperties::Motoko { .. 
}), From 8339f1436f13118bce2c0bb7882605e18064e44c Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 20 Apr 2024 23:59:47 +0300 Subject: [PATCH 116/354] removed superfluous code --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 2229b43401..9ce743aa5d 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -605,7 +605,7 @@ impl CanisterPool { } }; let parent_canister = self.get_first_canister_with_name(&parent_name).unwrap().canister_id(); - let _ = *dest_id_set.entry(start_node).or_insert_with(|| dest_graph.add_node(parent_canister)); + dest_id_set.entry(start_node).or_insert_with(|| dest_graph.add_node(parent_canister)); let bfs = Bfs::new(&source_graph, start_node); let mut filtered_bfs = BfsFiltered::new(bfs); From e3ce033036229d8f966c0f2216cfd17733be5955 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 00:00:40 +0300 Subject: [PATCH 117/354] removed a FIXME comment --- src/dfx/src/lib/models/canister.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 9ce743aa5d..21121e2125 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -620,7 +620,6 @@ impl CanisterPool { } }, |&source_parent_id, &source_child_id| { - // FIXME: Is the chain of `unwrap`s and `panic`s correct? 
let parent = source_graph.node_weight(source_parent_id).unwrap(); let parent_name = match parent { MotokoImport::Canister(name) => name, From e69ae95db15eae9ea42ee20802d58e3fe7541d5d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 00:06:45 +0300 Subject: [PATCH 118/354] issue for a dependency --- src/dfx/src/lib/graph/traverse_filtered.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index a467f8ecf4..6f061312ce 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -25,7 +25,7 @@ impl BfsFiltered { if let Some(first_id) = self.base.next(graph) { while let Some(source_child_id) = &self.base.next(graph) { if (&mut predicate)(source_child_id) { - // TODO: Create a `petgraph` issue asking to explain the next line of code workings. + // Requested to document the next line behavior in https://github.com/petgraph/petgraph/issues/634 let source_parent_id = self.base.stack.iter().map(|e| *e).chain(once(first_id)).find(&mut predicate); if let Some(source_parent_id) = &source_parent_id { (&mut call)(source_parent_id, &source_child_id); From 7e795d956a350cce694f987e5d48ca93b308e428 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 00:10:21 +0300 Subject: [PATCH 119/354] reported an issue to petgraph --- src/dfx/src/lib/graph/traverse_filtered.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 6f061312ce..24e7ce0ead 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,6 +1,5 @@ use std::iter::once; -// TODO: Somebody, adopt this code (and DFS) to `petgraph`. 
use petgraph::{data::DataMap, visit::{Bfs, IntoNeighbors, VisitMap}}; pub struct BfsFiltered { From 12d08d591dc1451d068641763d74c777a4f4b2a2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 00:18:36 +0300 Subject: [PATCH 120/354] TODO comment moved --- src/dfx/src/lib/models/canister.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 21121e2125..925ed9c8dd 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -439,6 +439,7 @@ fn check_valid_subtype(compiled_idl_path: &Path, specified_idl_path: &Path) -> D } /// TODO: Motoko-specific code not here +/// TODO: Copying this type uses `String.clone()` what may be inefficient. #[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)] pub enum MotokoImport { Canister(String), @@ -589,7 +590,7 @@ impl CanisterPool { let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = - real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); // `clone` is inefficient. + real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); let start: Vec<_> = start.into_iter().map(|node| *source_ids.get(&node).unwrap()).collect(); // Transform the graph of file dependencies to graph of canister dependencies. // For this do DFS for each of `real_canisters_to_build`. 
From 90edb6b71bb72df3faec2a3bb7ae2fb09913122a Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 01:35:54 +0300 Subject: [PATCH 121/354] bug fix --- src/dfx/src/lib/models/canister.rs | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 925ed9c8dd..df91afe57c 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -575,23 +575,18 @@ impl CanisterPool { } let real_canisters_to_build: Vec<_> = match canisters_to_build { - // TODO: (In below branches) isn't this check too strong? We can depend on a Rust canister for instance. - Some(canisters_to_build) => - canisters_to_build.into_iter().filter( - |name| self.get_first_canister_with_name(&name).unwrap().get_info().as_info::().is_ok() - ).collect(), - None => self.canisters.iter().filter_map( - |canister| if canister.get_info().as_info::().is_ok() { - Some(canister.get_name().to_string()) - } else { - None - }).collect(), + Some(canisters_to_build) => canisters_to_build, + None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), }; let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); - let start: Vec<_> = start.into_iter().map(|node| *source_ids.get(&node).unwrap()).collect(); + let start: Vec<_> = start.into_iter().filter_map(|node| if let Some(id) = source_ids.get(&node) { + Some(*id) + } else { + None + }).collect(); // Transform the graph of file dependencies to graph of canister dependencies. // For this do DFS for each of `real_canisters_to_build`. 
let mut dest_graph: DiGraph = DiGraph::new(); From 692ac9479c5f9814a34a824441e9902c9c3a8841 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 01:36:38 +0300 Subject: [PATCH 122/354] small code adjustments --- src/dfx/src/lib/models/canister.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index df91afe57c..5998bec4f3 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -2,7 +2,6 @@ use crate::lib::builders::{ custom_download, BuildConfig, BuildOutput, BuilderPool, CanisterBuilder, IdlBuildOutput, WasmBuildOutput, }; -use crate::lib::canister_info::motoko::MotokoCanisterInfo; use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; @@ -582,8 +581,8 @@ impl CanisterPool { let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); - let start: Vec<_> = start.into_iter().filter_map(|node| if let Some(id) = source_ids.get(&node) { - Some(*id) + let start: Vec<_> = start.into_iter().filter_map(|node| if let Some(&id) = source_ids.get(&node) { + Some(id) } else { None }).collect(); From 3b723d2a8b1e2144dafe901d0503d50ec78af8a4 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 01:37:53 +0300 Subject: [PATCH 123/354] rename --- src/dfx/src/lib/builders/motoko.rs | 40 +++++++++++++++--------------- src/dfx/src/lib/models/canister.rs | 19 +++++++------- 2 files changed, 30 insertions(+), 29 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 81b295db15..a204999730 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -6,7 +6,7 @@ use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use 
crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; -use crate::lib::models::canister::{CanisterPool, ImportsTracker, MotokoImport}; +use crate::lib::models::canister::{CanisterPool, ImportsTracker, Import}; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; use anyhow::Context; @@ -55,9 +55,9 @@ fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac top: Option<&CanisterInfo>, // hackish ) -> DfxResult { let parent = if let Some(top) = top { - MotokoImport::Canister(top.get_name().to_string()) // a little inefficient + Import::Canister(top.get_name().to_string()) // a little inefficient } else { - MotokoImport::Relative(file.to_path_buf()) + Import::Relative(file.to_path_buf()) }; if let Some(_) = imports.nodes.get(&parent) { // The item is already in the graph. return Ok(()); @@ -73,12 +73,12 @@ fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTrac let output = String::from_utf8_lossy(&output.stdout); for line in output.lines() { - let child = MotokoImport::try_from(line).context("Failed to create MotokoImport.")?; + let child = Import::try_from(line).context("Failed to create MotokoImport.")?; match &child { - MotokoImport::Relative(path) => { + Import::Relative(path) => { add_imports_recursive(cache, path.as_path(), imports, pool, None)?; } - MotokoImport::Canister(canister_name) => { // duplicate code + Import::Canister(canister_name) => { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { @@ -114,14 +114,14 @@ impl CanisterBuilder for MotokoBuilder { match petgraph::algo::toposort(&pool.imports.borrow().graph, None) { Ok(order) => { Ok(order.into_iter().filter_map(|id| match graph.node_weight(id) { - Some(MotokoImport::Canister(name)) => 
pool.get_first_canister_with_name(name.as_str()), // TODO: a little inefficient + Some(Import::Canister(name)) => pool.get_first_canister_with_name(name.as_str()), // TODO: a little inefficient _ => None, }).map(|canister| canister.canister_id()).collect()) } Err(err) => { let message = match graph.node_weight(err.node_id()) { Some(canister_id) => match canister_id { - MotokoImport::Canister(name) => &name, + Import::Canister(name) => &name, _ => "", }, None => "", @@ -175,7 +175,7 @@ impl CanisterBuilder for MotokoBuilder { add_imports(cache.as_ref(), canister_info, &mut *pool.imports.borrow_mut(), pool)?; // If the management canister is being imported, emit the candid file. - if pool.imports.borrow().nodes.contains_key(&MotokoImport::Ic("aaaaa-aa".to_string())) + if pool.imports.borrow().nodes.contains_key(&Import::Ic("aaaaa-aa".to_string())) { let management_idl_path = idl_dir_path.join("aaaaa-aa.did"); dfx_core::fs::write(management_idl_path, management_idl()?)?; @@ -200,10 +200,10 @@ impl CanisterBuilder for MotokoBuilder { if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let wasm_file_time = wasm_file_metadata.modified()?; let mut imports = pool.imports.borrow_mut(); - let start = if let Some(node_index) = imports.nodes.get(&MotokoImport::Canister(canister_info.get_name().to_string())) { + let start = if let Some(node_index) = imports.nodes.get(&Import::Canister(canister_info.get_name().to_string())) { *node_index } else { - let node = MotokoImport::Relative(motoko_info.get_main_path().to_path_buf()); + let node = Import::Relative(motoko_info.get_main_path().to_path_buf()); let node_index = imports.graph.add_node(node.clone()); imports.nodes.insert(node, node_index); node_index @@ -212,7 +212,7 @@ impl CanisterBuilder for MotokoBuilder { loop { if let Some(import) = import_iter.next(&imports.graph) { let imported_file = match &imports.graph[import] { - MotokoImport::Canister(canister_name) => { // duplicate code + Import::Canister(canister_name) 
=> { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { @@ -224,7 +224,7 @@ impl CanisterBuilder for MotokoBuilder { None } } - MotokoImport::Ic(canister_id) => { + Import::Ic(canister_id) => { if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { if let Some(canister) = pool.get_first_canister_with_name(canister_name) { if let Some(main_file) = canister.get_info().get_main_file() { @@ -239,7 +239,7 @@ impl CanisterBuilder for MotokoBuilder { None } } - MotokoImport::Lib(_path) => { + Import::Lib(_path) => { // Skip libs, all changes by package managers don't modify existing directories but create new ones. continue; // let i = path.find('/'); @@ -265,7 +265,7 @@ impl CanisterBuilder for MotokoBuilder { // } // } } - MotokoImport::Relative(path) => { + Import::Relative(path) => { Some(Path::new(&path).to_owned()) } }; @@ -403,7 +403,7 @@ fn motoko_compile(logger: &Logger, cache: &dyn Cache, params: &MotokoParams<'_>) Ok(()) } -impl TryFrom<&str> for MotokoImport { +impl TryFrom<&str> for Import { type Error = DfxError; fn try_from(line: &str) -> Result { @@ -430,9 +430,9 @@ impl TryFrom<&str> for MotokoImport { } let (prefix, name) = url.split_at(index + 1); match prefix { - "canister:" => MotokoImport::Canister(name.to_owned()), - "ic:" => MotokoImport::Ic(name.to_owned()), - "mo:" => MotokoImport::Lib(name.to_owned()), + "canister:" => Import::Canister(name.to_owned()), + "ic:" => Import::Ic(name.to_owned()), + "mo:" => Import::Lib(name.to_owned()), _ => { return Err(DfxError::new(BuildError::DependencyError(format!( "Unknown import {}", @@ -450,7 +450,7 @@ impl TryFrom<&str> for MotokoImport { path.display() )))); }; - MotokoImport::Relative(path) + Import::Relative(path) } None => { return Err(DfxError::new(BuildError::DependencyError(format!( diff --git a/src/dfx/src/lib/models/canister.rs 
b/src/dfx/src/lib/models/canister.rs index 5998bec4f3..f317cfd1f7 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -437,10 +437,11 @@ fn check_valid_subtype(compiled_idl_path: &Path, specified_idl_path: &Path) -> D Ok(()) } -/// TODO: Motoko-specific code not here +/// Used mainly for Motoko +/// /// TODO: Copying this type uses `String.clone()` what may be inefficient. #[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)] -pub enum MotokoImport { +pub enum Import { Canister(String), Ic(String), Lib(String), // TODO: Unused, because package manager never update existing files (but create new dirs) @@ -449,8 +450,8 @@ pub enum MotokoImport { /// The graph of Motoko imports (TODO: Motoko-specific code not here) pub struct ImportsTracker { - pub nodes: HashMap, - pub graph: DiGraph, + pub nodes: HashMap, + pub graph: DiGraph, } impl ImportsTracker { @@ -580,7 +581,7 @@ impl CanisterPool { let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = - real_canisters_to_build.iter().map(|name| MotokoImport::Canister(name.clone())).collect(); + real_canisters_to_build.iter().map(|name| Import::Canister(name.clone())).collect(); let start: Vec<_> = start.into_iter().filter_map(|node| if let Some(&id) = source_ids.get(&node) { Some(id) } else { @@ -594,7 +595,7 @@ impl CanisterPool { // Initialize "mirrors" of the parent node of source graph in dest graph: let parent = source_graph.node_weight(start_node).unwrap(); let parent_name = match parent { - MotokoImport::Canister(name) => name, + Import::Canister(name) => name, _ => { panic!("programming error"); } @@ -608,7 +609,7 @@ impl CanisterPool { source_graph, |&s| { let source_id = source_graph.node_weight(s); - if let Some(MotokoImport::Canister(_)) = source_id { + if let Some(Import::Canister(_)) = source_id { true } else { false @@ -617,7 +618,7 @@ impl CanisterPool { |&source_parent_id, &source_child_id| { let 
parent = source_graph.node_weight(source_parent_id).unwrap(); let parent_name = match parent { - MotokoImport::Canister(name) => name, + Import::Canister(name) => name, _ => { panic!("programming error"); } @@ -626,7 +627,7 @@ impl CanisterPool { let child = source_graph.node_weight(source_child_id).unwrap(); let child_name = match child { - MotokoImport::Canister(name) => name, + Import::Canister(name) => name, _ => { panic!("programming error"); } From 577cda863fe15e57ba598d329ce2a4cea99d7e86 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 01:38:45 +0300 Subject: [PATCH 124/354] doc comment --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index f317cfd1f7..acb07c5ac2 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -448,7 +448,7 @@ pub enum Import { Relative(PathBuf), } -/// The graph of Motoko imports (TODO: Motoko-specific code not here) +/// The graph of imports (used mainly for Motoko) pub struct ImportsTracker { pub nodes: HashMap, pub graph: DiGraph, From cb1f419cc4de73750223aec7efbc66319c21fe83 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 21:12:52 +0300 Subject: [PATCH 125/354] corrected code order --- src/dfx/src/lib/builders/motoko.rs | 33 +++++++++++++++--------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index a204999730..5a7e34e479 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -161,26 +161,10 @@ impl CanisterBuilder for MotokoBuilder { .map(|c| (c.canister_id().to_text(), c.get_name().to_string())) .collect(); - std::fs::create_dir_all(motoko_info.get_output_root()).with_context(|| { - format!( - "Failed to create {}.", - motoko_info.get_output_root().to_string_lossy() - ) - })?; let cache = 
&self.cache; - let idl_dir_path = &config.idl_root; - std::fs::create_dir_all(idl_dir_path) - .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; add_imports(cache.as_ref(), canister_info, &mut *pool.imports.borrow_mut(), pool)?; - // If the management canister is being imported, emit the candid file. - if pool.imports.borrow().nodes.contains_key(&Import::Ic("aaaaa-aa".to_string())) - { - let management_idl_path = idl_dir_path.join("aaaaa-aa.did"); - dfx_core::fs::write(management_idl_path, management_idl()?)?; - } - let package_arguments = package_arguments::load(cache.as_ref(), motoko_info.get_packtool())?; let mut package_arguments_map = BTreeMap::<&str, &str>::new(); @@ -290,6 +274,23 @@ impl CanisterBuilder for MotokoBuilder { } }; + std::fs::create_dir_all(motoko_info.get_output_root()).with_context(|| { + format!( + "Failed to create {}.", + motoko_info.get_output_root().to_string_lossy() + ) + })?; + let idl_dir_path = &config.idl_root; + std::fs::create_dir_all(idl_dir_path) + .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; + + // If the management canister is being imported, emit the candid file. 
+ if pool.imports.borrow().nodes.contains_key(&Import::Ic("aaaaa-aa".to_string())) + { + let management_idl_path = idl_dir_path.join("aaaaa-aa.did"); + dfx_core::fs::write(management_idl_path, management_idl()?)?; + } + let moc_arguments = match motoko_info.get_args() { Some(args) => [ package_arguments, From ac39257e85066123c7356fe3cae3acd55d8607f1 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 22:32:01 +0300 Subject: [PATCH 126/354] changed code order --- src/dfx/src/lib/builders/motoko.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 5a7e34e479..bc23856a1c 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -148,12 +148,6 @@ impl CanisterBuilder for MotokoBuilder { let input_path = motoko_info.get_main_path(); let output_wasm_path = motoko_info.get_output_wasm_path(); - // from name to principal: - let id_map = pool - .get_canister_list() - .iter() - .map(|c| (c.get_name().to_string(), c.canister_id().to_text())) - .collect(); // from principal to name: let rev_id_map: BTreeMap = pool .get_canister_list() @@ -274,6 +268,13 @@ impl CanisterBuilder for MotokoBuilder { } }; + // from name to principal: + let id_map = pool + .get_canister_list() + .iter() + .map(|c| (c.get_name().to_string(), c.canister_id().to_text())) + .collect(); + std::fs::create_dir_all(motoko_info.get_output_root()).with_context(|| { format!( "Failed to create {}.", From 7e9cdc4ac3dd0398d51031a0f5f786593ba502ec Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 22:32:21 +0300 Subject: [PATCH 127/354] code formatting --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index bc23856a1c..ee93bbe42a 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -263,7 +263,6 
@@ impl CanisterBuilder for MotokoBuilder { wasm: WasmBuildOutput::File(motoko_info.get_output_wasm_path().to_path_buf()), idl: IdlBuildOutput::File(motoko_info.get_output_idl_path().to_path_buf()), }) - } } }; From 897b6f740c9085c2aa17959bfa22619557057426 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 22:44:27 +0300 Subject: [PATCH 128/354] refactor --- src/dfx/src/lib/builders/motoko.rs | 252 +++++++++++++++-------------- 1 file changed, 135 insertions(+), 117 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index ee93bbe42a..6fb8749788 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -40,6 +40,116 @@ impl MotokoBuilder { cache: env.get_cache(), }) } + + fn should_build( + &self, + pool: &CanisterPool, + canister_info: &CanisterInfo, + ) -> DfxResult { + let motoko_info = canister_info.as_info::()?; + let output_wasm_path = motoko_info.get_output_wasm_path(); + + // from principal to name: + let rev_id_map: BTreeMap = pool + .get_canister_list() + .iter() + .map(|c| (c.canister_id().to_text(), c.get_name().to_string())) + .collect(); + + let cache = &self.cache; + + add_imports(cache.as_ref(), canister_info, &mut *pool.imports.borrow_mut(), pool)?; + + // Check that one of the dependencies is newer than the target: + if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { + let wasm_file_time = wasm_file_metadata.modified()?; + let mut imports = pool.imports.borrow_mut(); + let start = if let Some(node_index) = imports.nodes.get(&Import::Canister(canister_info.get_name().to_string())) { + *node_index + } else { + let node = Import::Relative(motoko_info.get_main_path().to_path_buf()); + let node_index = imports.graph.add_node(node.clone()); + imports.nodes.insert(node, node_index); + node_index + }; + let mut import_iter = Bfs::new(&imports.graph, start); + loop { + if let Some(import) = import_iter.next(&imports.graph) { + let imported_file = 
match &imports.graph[import] { + Import::Canister(canister_name) => { // duplicate code + if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { + let main_file = canister.get_info().get_main_file(); + if let Some(main_file) = main_file { + Some(main_file.to_owned()) + } else { + None + } + } else { + None + } + } + Import::Ic(canister_id) => { + if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { + if let Some(canister) = pool.get_first_canister_with_name(canister_name) { + if let Some(main_file) = canister.get_info().get_main_file() { + Some(main_file.to_owned()) + } else { + None + } + } else { + None + } + } else { + None + } + } + Import::Lib(_path) => { + // Skip libs, all changes by package managers don't modify existing directories but create new ones. + continue; + // let i = path.find('/'); + // let pre_path = if let Some(i) = i { + // let expanded = Path::new( + // package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? + // ); + // expanded.join(&path[i+1..]) + // } else { + // Path::new(path.as_str()).to_owned() + // }; + // let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; + // let path2 = path2.to_string(); + // let path2 = Path::new(&path2); + // if path2.exists() { // TODO: Is it correct order of two variants? 
+ // Some(Path::new(path2).to_owned()) + // } else { + // let path3 = pre_path.join(Path::new("lib.mo")); + // if path3.exists() { + // Some(path3.to_owned()) + // } else { + // bail!("source file has been deleted"); + // } + // } + } + Import::Relative(path) => { + Some(Path::new(&path).to_owned()) + } + }; + if let Some(imported_file) = imported_file { + let imported_file_metadata = metadata(imported_file.as_ref())?; + let imported_file_time = imported_file_metadata.modified()?; + if imported_file_time > wasm_file_time { + break; + }; + }; + } else { + trace!(self.logger, "Canister {} already compiled.", canister_info.get_name()); + return Ok(false); + } + } + }; + + Ok(true) + } + } /// Add imports originating from canister `info` to the graph `imports` of dependencies. @@ -148,125 +258,16 @@ impl CanisterBuilder for MotokoBuilder { let input_path = motoko_info.get_main_path(); let output_wasm_path = motoko_info.get_output_wasm_path(); - // from principal to name: - let rev_id_map: BTreeMap = pool - .get_canister_list() - .iter() - .map(|c| (c.canister_id().to_text(), c.get_name().to_string())) - .collect(); - - let cache = &self.cache; - - add_imports(cache.as_ref(), canister_info, &mut *pool.imports.borrow_mut(), pool)?; - - let package_arguments = - package_arguments::load(cache.as_ref(), motoko_info.get_packtool())?; - let mut package_arguments_map = BTreeMap::<&str, &str>::new(); - { // block - let mut i = 0; - while i + 3 <= package_arguments.len() { - if package_arguments[i] == "--package" { - package_arguments_map.insert(&package_arguments[i+1], &package_arguments[i+2]); - i += 3; - } else { - i += 1; - } - }; + if !self.should_build(pool, canister_info)? 
{ + return Ok(BuildOutput { // duplicate code + canister_id: canister_info + .get_canister_id() + .expect("Could not find canister ID."), + wasm: WasmBuildOutput::File(motoko_info.get_output_wasm_path().to_path_buf()), + idl: IdlBuildOutput::File(motoko_info.get_output_idl_path().to_path_buf()), + }); } - // Check that one of the dependencies is newer than the target: - if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { - let wasm_file_time = wasm_file_metadata.modified()?; - let mut imports = pool.imports.borrow_mut(); - let start = if let Some(node_index) = imports.nodes.get(&Import::Canister(canister_info.get_name().to_string())) { - *node_index - } else { - let node = Import::Relative(motoko_info.get_main_path().to_path_buf()); - let node_index = imports.graph.add_node(node.clone()); - imports.nodes.insert(node, node_index); - node_index - }; - let mut import_iter = Bfs::new(&imports.graph, start); - loop { - if let Some(import) = import_iter.next(&imports.graph) { - let imported_file = match &imports.graph[import] { - Import::Canister(canister_name) => { // duplicate code - if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { - let main_file = canister.get_info().get_main_file(); - if let Some(main_file) = main_file { - Some(main_file.to_owned()) - } else { - None - } - } else { - None - } - } - Import::Ic(canister_id) => { - if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { - if let Some(canister) = pool.get_first_canister_with_name(canister_name) { - if let Some(main_file) = canister.get_info().get_main_file() { - Some(main_file.to_owned()) - } else { - None - } - } else { - None - } - } else { - None - } - } - Import::Lib(_path) => { - // Skip libs, all changes by package managers don't modify existing directories but create new ones. 
- continue; - // let i = path.find('/'); - // let pre_path = if let Some(i) = i { - // let expanded = Path::new( - // package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? - // ); - // expanded.join(&path[i+1..]) - // } else { - // Path::new(path.as_str()).to_owned() - // }; - // let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; - // let path2 = path2.to_string(); - // let path2 = Path::new(&path2); - // if path2.exists() { // TODO: Is it correct order of two variants? - // Some(Path::new(path2).to_owned()) - // } else { - // let path3 = pre_path.join(Path::new("lib.mo")); - // if path3.exists() { - // Some(path3.to_owned()) - // } else { - // bail!("source file has been deleted"); - // } - // } - } - Import::Relative(path) => { - Some(Path::new(&path).to_owned()) - } - }; - if let Some(imported_file) = imported_file { - let imported_file_metadata = metadata(imported_file.as_ref())?; - let imported_file_time = imported_file_metadata.modified()?; - if imported_file_time > wasm_file_time { - break; - }; - }; - } else { - trace!(self.logger, "Canister {} already compiled.", canister_info.get_name()); - return Ok(BuildOutput { // duplicate code - canister_id: canister_info - .get_canister_id() - .expect("Could not find canister ID."), - wasm: WasmBuildOutput::File(motoko_info.get_output_wasm_path().to_path_buf()), - idl: IdlBuildOutput::File(motoko_info.get_output_idl_path().to_path_buf()), - }) - } - } - }; - // from name to principal: let id_map = pool .get_canister_list() @@ -291,6 +292,23 @@ impl CanisterBuilder for MotokoBuilder { dfx_core::fs::write(management_idl_path, management_idl()?)?; } + let cache = &self.cache; + + let package_arguments = + package_arguments::load(cache.as_ref(), motoko_info.get_packtool())?; + let mut package_arguments_map = BTreeMap::<&str, &str>::new(); + { // block + let mut i = 0; + while i + 3 <= package_arguments.len() { + if package_arguments[i] == "--package" { + 
package_arguments_map.insert(&package_arguments[i+1], &package_arguments[i+2]); + i += 3; + } else { + i += 1; + } + }; + } + let moc_arguments = match motoko_info.get_args() { Some(args) => [ package_arguments, From 87535d1fb1a6e90121c7e8e5d3374c731d9f4f7c Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 23:48:46 +0300 Subject: [PATCH 129/354] refactor --- src/dfx-core/src/config/cache.rs | 7 ++ src/dfx/src/lib/builders/mod.rs | 119 +++++++++++++++++++++++- src/dfx/src/lib/builders/motoko.rs | 15 +-- src/dfx/src/lib/canister_info.rs | 7 ++ src/dfx/src/lib/canister_info/motoko.rs | 5 - 5 files changed, 140 insertions(+), 13 deletions(-) diff --git a/src/dfx-core/src/config/cache.rs b/src/dfx-core/src/config/cache.rs index b365524935..22e0cef709 100644 --- a/src/dfx-core/src/config/cache.rs +++ b/src/dfx-core/src/config/cache.rs @@ -5,6 +5,7 @@ use crate::error::cache::CacheError; use crate::foundation::get_user_home; use semver::Version; use std::path::PathBuf; +use std::fmt::Debug; pub trait Cache { fn version_str(&self) -> String; @@ -14,6 +15,12 @@ pub trait Cache { fn get_binary_command(&self, binary_name: &str) -> Result; } +impl Debug for dyn Cache { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "[cache version {}]", self.version_str()) + } +} + pub fn get_cache_root() -> Result { let cache_root = std::env::var_os("DFX_CACHE_ROOT"); // dirs-next is not used for *nix to preserve existing paths diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 866f7cd1ff..ee5e979ac8 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -3,19 +3,22 @@ use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::models::canister::CanisterPool; +use crate::lib::models::canister::Import; use anyhow::{bail, Context}; use candid::Principal as CanisterId; use 
candid_parser::utils::CandidSource; +use dfx_core::config::cache::Cache; use dfx_core::config::model::dfinity::{Config, Profile}; use dfx_core::network::provider::get_network_context; use dfx_core::util; use fn_error_context::context; use handlebars::Handlebars; +use petgraph::visit::Bfs; use std::borrow::Cow; use std::collections::BTreeMap; use std::ffi::OsStr; use std::fmt::Write; -use std::fs; +use std::fs::{self, metadata}; use std::io::Read; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; @@ -29,6 +32,8 @@ mod rust; pub use custom::custom_download; +use self::motoko::add_imports; + #[derive(Debug)] pub enum WasmBuildOutput { // Wasm(Vec), @@ -221,6 +226,118 @@ pub trait CanisterBuilder { Ok(()) } + fn should_build( + &self, + pool: &CanisterPool, + canister_info: &CanisterInfo, + cache: &dyn Cache, + ) -> DfxResult { + // let motoko_info = canister_info.as_info::()?; + let output_wasm_path = canister_info.get_output_wasm_path(); + + // from principal to name: + let rev_id_map: BTreeMap = pool + .get_canister_list() + .iter() + .map(|c| (c.canister_id().to_text(), c.get_name().to_string())) + .collect(); + + if canister_info.is_motoko() { // hack + add_imports(cache, canister_info, &mut *pool.imports.borrow_mut(), pool)?; + } + + // Check that one of the dependencies is newer than the target: + if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { + let wasm_file_time = wasm_file_metadata.modified()?; + let imports = pool.imports.borrow_mut(); + let start = if let Some(node_index) = imports.nodes.get(&Import::Canister(canister_info.get_name().to_string())) { + *node_index + } else { + panic!("programming error"); // FIXME: correct? 
+ // let node = Import::Relative(canister_info.get_main_path().to_path_buf()); + // let node_index = imports.graph.add_node(node.clone()); + // imports.nodes.insert(node, node_index); + // node_index + }; + let mut import_iter = Bfs::new(&imports.graph, start); + loop { + if let Some(import) = import_iter.next(&imports.graph) { + let imported_file = match &imports.graph[import] { + Import::Canister(canister_name) => { // duplicate code + if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { + let main_file = canister.get_info().get_main_file(); + if let Some(main_file) = main_file { + Some(main_file.to_owned()) + } else { + None + } + } else { + None + } + } + Import::Ic(canister_id) => { + if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { + if let Some(canister) = pool.get_first_canister_with_name(canister_name) { + if let Some(main_file) = canister.get_info().get_main_file() { + Some(main_file.to_owned()) + } else { + None + } + } else { + None + } + } else { + None + } + } + Import::Lib(_path) => { + // Skip libs, all changes by package managers don't modify existing directories but create new ones. + continue; + // let i = path.find('/'); + // let pre_path = if let Some(i) = i { + // let expanded = Path::new( + // package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? + // ); + // expanded.join(&path[i+1..]) + // } else { + // Path::new(path.as_str()).to_owned() + // }; + // let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; + // let path2 = path2.to_string(); + // let path2 = Path::new(&path2); + // if path2.exists() { // TODO: Is it correct order of two variants? 
+ // Some(Path::new(path2).to_owned()) + // } else { + // let path3 = pre_path.join(Path::new("lib.mo")); + // if path3.exists() { + // Some(path3.to_owned()) + // } else { + // bail!("source file has been deleted"); + // } + // } + } + Import::Relative(path) => { + Some(Path::new(&path).to_owned()) + } + }; + if let Some(imported_file) = imported_file { + let imported_file_metadata = metadata(&imported_file)?; + let imported_file_time = imported_file_metadata.modified()?; + if imported_file_time > wasm_file_time { + break; + }; + }; + } else { + // FIXME: Uncomment: + // trace!(self.logger, "Canister {} already compiled.", canister_info.get_name()); + return Ok(false); + } + } + }; + + Ok(true) + } + /// Get the path to the provided candid file for the canister. /// No need to guarantee the file exists, as the caller will handle that. fn get_candid_path( diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 6fb8749788..ba749a5fe4 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -41,13 +41,14 @@ impl MotokoBuilder { }) } + // FIXME: Remove the function from here, because it has been copied to `lib::builders` fn should_build( &self, pool: &CanisterPool, canister_info: &CanisterInfo, ) -> DfxResult { - let motoko_info = canister_info.as_info::()?; - let output_wasm_path = motoko_info.get_output_wasm_path(); + let motoko_info = canister_info.as_info::()?; // TODO: Remove. + let output_wasm_path = canister_info.get_output_wasm_path(); // from principal to name: let rev_id_map: BTreeMap = pool @@ -154,7 +155,7 @@ impl MotokoBuilder { /// Add imports originating from canister `info` to the graph `imports` of dependencies. 
#[context("Failed to find imports for canister at '{}'.", info.as_info::().unwrap().get_main_path().display())] -fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { +pub fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { let motoko_info = info.as_info::()?; #[context("Failed recursive dependency detection at {}.", file.display())] fn add_imports_recursive ( @@ -253,17 +254,17 @@ impl CanisterBuilder for MotokoBuilder { canister_info: &CanisterInfo, config: &BuildConfig, ) -> DfxResult { - let motoko_info = canister_info.as_info::()?; + let motoko_info = canister_info.as_info::()?; // TODO: Remove. let profile = config.profile; let input_path = motoko_info.get_main_path(); - let output_wasm_path = motoko_info.get_output_wasm_path(); + let output_wasm_path = canister_info.get_output_wasm_path(); if !self.should_build(pool, canister_info)? { return Ok(BuildOutput { // duplicate code canister_id: canister_info .get_canister_id() .expect("Could not find canister ID."), - wasm: WasmBuildOutput::File(motoko_info.get_output_wasm_path().to_path_buf()), + wasm: WasmBuildOutput::File(canister_info.get_output_wasm_path().to_path_buf()), idl: IdlBuildOutput::File(motoko_info.get_output_idl_path().to_path_buf()), }); } @@ -349,7 +350,7 @@ impl CanisterBuilder for MotokoBuilder { canister_id: canister_info .get_canister_id() .expect("Could not find canister ID."), - wasm: WasmBuildOutput::File(motoko_info.get_output_wasm_path().to_path_buf()), + wasm: WasmBuildOutput::File(canister_info.get_output_wasm_path().to_path_buf()), idl: IdlBuildOutput::File(motoko_info.get_output_idl_path().to_path_buf()), }) } diff --git a/src/dfx/src/lib/canister_info.rs b/src/dfx/src/lib/canister_info.rs index ada2099d57..b66f199f0b 100644 --- a/src/dfx/src/lib/canister_info.rs +++ b/src/dfx/src/lib/canister_info.rs @@ -61,6 +61,7 @@ pub struct 
CanisterInfo { tech_stack: Option, gzip: bool, init_arg: Option, + output_wasm_path: PathBuf, } impl CanisterInfo { @@ -148,6 +149,8 @@ impl CanisterInfo { let gzip = canister_config.gzip.unwrap_or(false); let init_arg = canister_config.init_arg.clone(); + let output_wasm_path = output_root.join(name).with_extension("wasm"); + let canister_info = CanisterInfo { name: name.to_string(), declarations_config, @@ -170,6 +173,7 @@ impl CanisterInfo { pull_dependencies, gzip, init_arg, + output_wasm_path, }; Ok(canister_info) @@ -203,6 +207,9 @@ impl CanisterInfo { pub fn get_output_root(&self) -> &Path { &self.output_root } + pub fn get_output_wasm_path(&self) -> &Path { + self.output_wasm_path.as_path() + } #[context("Failed to get canister id for '{}'.", self.name)] pub fn get_canister_id(&self) -> DfxResult { diff --git a/src/dfx/src/lib/canister_info/motoko.rs b/src/dfx/src/lib/canister_info/motoko.rs index e540328fa3..dee987bfb5 100644 --- a/src/dfx/src/lib/canister_info/motoko.rs +++ b/src/dfx/src/lib/canister_info/motoko.rs @@ -8,7 +8,6 @@ pub struct MotokoCanisterInfo { input_path: PathBuf, output_root: PathBuf, - output_wasm_path: PathBuf, output_idl_path: PathBuf, output_stable_path: PathBuf, output_did_js_path: PathBuf, @@ -23,9 +22,6 @@ impl MotokoCanisterInfo { pub fn get_main_path(&self) -> &Path { self.input_path.as_path() } - pub fn get_output_wasm_path(&self) -> &Path { - self.output_wasm_path.as_path() - } pub fn get_output_idl_path(&self) -> &Path { self.output_idl_path.as_path() } @@ -80,7 +76,6 @@ impl CanisterInfoFactory for MotokoCanisterInfo { Ok(MotokoCanisterInfo { input_path, output_root, - output_wasm_path, output_idl_path, output_stable_path, output_did_js_path, From 2620184b17c314fde2a466e36e0b5222e409ca23 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 21 Apr 2024 23:52:29 +0300 Subject: [PATCH 130/354] removed old version of a function --- src/dfx/src/lib/builders/motoko.rs | 115 +---------------------------- 1 file changed, 1 
insertion(+), 114 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index ba749a5fe4..2661933bd9 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -13,9 +13,7 @@ use anyhow::Context; use candid::Principal as CanisterId; use dfx_core::config::cache::Cache; use dfx_core::config::model::dfinity::{MetadataVisibility, Profile}; -use dfx_core::fs::metadata; use fn_error_context::context; -use petgraph::visit::Bfs; use slog::{info, o, trace, warn, Logger}; use std::collections::BTreeMap; use std::convert::TryFrom; @@ -40,117 +38,6 @@ impl MotokoBuilder { cache: env.get_cache(), }) } - - // FIXME: Remove the function from here, because it has been copied to `lib::builders` - fn should_build( - &self, - pool: &CanisterPool, - canister_info: &CanisterInfo, - ) -> DfxResult { - let motoko_info = canister_info.as_info::()?; // TODO: Remove. - let output_wasm_path = canister_info.get_output_wasm_path(); - - // from principal to name: - let rev_id_map: BTreeMap = pool - .get_canister_list() - .iter() - .map(|c| (c.canister_id().to_text(), c.get_name().to_string())) - .collect(); - - let cache = &self.cache; - - add_imports(cache.as_ref(), canister_info, &mut *pool.imports.borrow_mut(), pool)?; - - // Check that one of the dependencies is newer than the target: - if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { - let wasm_file_time = wasm_file_metadata.modified()?; - let mut imports = pool.imports.borrow_mut(); - let start = if let Some(node_index) = imports.nodes.get(&Import::Canister(canister_info.get_name().to_string())) { - *node_index - } else { - let node = Import::Relative(motoko_info.get_main_path().to_path_buf()); - let node_index = imports.graph.add_node(node.clone()); - imports.nodes.insert(node, node_index); - node_index - }; - let mut import_iter = Bfs::new(&imports.graph, start); - loop { - if let Some(import) = import_iter.next(&imports.graph) { - let imported_file 
= match &imports.graph[import] { - Import::Canister(canister_name) => { // duplicate code - if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { - let main_file = canister.get_info().get_main_file(); - if let Some(main_file) = main_file { - Some(main_file.to_owned()) - } else { - None - } - } else { - None - } - } - Import::Ic(canister_id) => { - if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { - if let Some(canister) = pool.get_first_canister_with_name(canister_name) { - if let Some(main_file) = canister.get_info().get_main_file() { - Some(main_file.to_owned()) - } else { - None - } - } else { - None - } - } else { - None - } - } - Import::Lib(_path) => { - // Skip libs, all changes by package managers don't modify existing directories but create new ones. - continue; - // let i = path.find('/'); - // let pre_path = if let Some(i) = i { - // let expanded = Path::new( - // package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? - // ); - // expanded.join(&path[i+1..]) - // } else { - // Path::new(path.as_str()).to_owned() - // }; - // let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; - // let path2 = path2.to_string(); - // let path2 = Path::new(&path2); - // if path2.exists() { // TODO: Is it correct order of two variants? 
- // Some(Path::new(path2).to_owned()) - // } else { - // let path3 = pre_path.join(Path::new("lib.mo")); - // if path3.exists() { - // Some(path3.to_owned()) - // } else { - // bail!("source file has been deleted"); - // } - // } - } - Import::Relative(path) => { - Some(Path::new(&path).to_owned()) - } - }; - if let Some(imported_file) = imported_file { - let imported_file_metadata = metadata(imported_file.as_ref())?; - let imported_file_time = imported_file_metadata.modified()?; - if imported_file_time > wasm_file_time { - break; - }; - }; - } else { - trace!(self.logger, "Canister {} already compiled.", canister_info.get_name()); - return Ok(false); - } - } - }; - - Ok(true) - } - } /// Add imports originating from canister `info` to the graph `imports` of dependencies. @@ -259,7 +146,7 @@ impl CanisterBuilder for MotokoBuilder { let input_path = motoko_info.get_main_path(); let output_wasm_path = canister_info.get_output_wasm_path(); - if !self.should_build(pool, canister_info)? { + if !self.should_build(pool, canister_info, self.cache.as_ref())? 
{ return Ok(BuildOutput { // duplicate code canister_id: canister_info .get_canister_id() From ea82189e1383b2fa47d02809df0b0a103ef5f766 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 00:37:38 +0300 Subject: [PATCH 131/354] mostly ready --- src/dfx/src/commands/build.rs | 6 +++--- src/dfx/src/commands/generate.rs | 6 +++--- src/dfx/src/lib/builders/motoko.rs | 10 ---------- src/dfx/src/lib/models/canister.rs | 18 ++++++++++++++---- .../operations/canister/deploy_canisters.rs | 11 ++++++++++- 5 files changed, 30 insertions(+), 21 deletions(-) diff --git a/src/dfx/src/commands/build.rs b/src/dfx/src/commands/build.rs index 2957c453d3..250fb74168 100644 --- a/src/dfx/src/commands/build.rs +++ b/src/dfx/src/commands/build.rs @@ -33,8 +33,8 @@ pub struct CanisterBuildOpts { network: NetworkOpt, } -pub fn exec(env: &dyn Environment, opts: CanisterBuildOpts) -> DfxResult { - let env = create_agent_environment(env, opts.network.to_network_name())?; +pub fn exec(env1: &dyn Environment, opts: CanisterBuildOpts) -> DfxResult { + let env = create_agent_environment(env1, opts.network.to_network_name())?; let logger = env.get_logger(); @@ -90,7 +90,7 @@ pub fn exec(env: &dyn Environment, opts: CanisterBuildOpts) -> DfxResult { .with_build_mode_check(build_mode_check) .with_canisters_to_build(canisters_to_build) .with_env_file(env_file); - runtime.block_on(canister_pool.build_or_fail(logger, &build_config))?; + runtime.block_on(canister_pool.build_or_fail(env1, logger, &build_config))?; Ok(()) } diff --git a/src/dfx/src/commands/generate.rs b/src/dfx/src/commands/generate.rs index 17992ef765..46cd43e5f4 100644 --- a/src/dfx/src/commands/generate.rs +++ b/src/dfx/src/commands/generate.rs @@ -20,8 +20,8 @@ pub struct GenerateOpts { network: Option, } -pub fn exec(env: &dyn Environment, opts: GenerateOpts) -> DfxResult { - let env = create_anonymous_agent_environment(env, None)?; +pub fn exec(env1: &dyn Environment, opts: GenerateOpts) -> DfxResult { + let env = 
create_anonymous_agent_environment(env1, None)?; let log = env.get_logger(); // Read the config. @@ -74,7 +74,7 @@ pub fn exec(env: &dyn Environment, opts: GenerateOpts) -> DfxResult { let canister_pool_build = CanisterPool::load(&env, true, &build_dependees)?; slog::info!(log, "Building canisters before generate for Motoko"); let runtime = Runtime::new().expect("Unable to create a runtime"); - runtime.block_on(canister_pool_build.build_or_fail(log, &build_config))?; + runtime.block_on(canister_pool_build.build_or_fail(env1, log, &build_config))?; } for canister in canister_pool_load.canisters_to_build(&generate_config) { diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 2661933bd9..6a49bdb359 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -146,16 +146,6 @@ impl CanisterBuilder for MotokoBuilder { let input_path = motoko_info.get_main_path(); let output_wasm_path = canister_info.get_output_wasm_path(); - if !self.should_build(pool, canister_info, self.cache.as_ref())? { - return Ok(BuildOutput { // duplicate code - canister_id: canister_info - .get_canister_id() - .expect("Could not find canister ID."), - wasm: WasmBuildOutput::File(canister_info.get_output_wasm_path().to_path_buf()), - idl: IdlBuildOutput::File(motoko_info.get_output_idl_path().to_path_buf()), - }); - } - // from name to principal: let id_map = pool .get_canister_list() diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index acb07c5ac2..614c3220ed 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -40,8 +40,9 @@ use std::sync::Arc; /// Once an instance of a canister is built it is immutable. So for comparing /// two canisters one can use their ID. pub struct Canister { - info: CanisterInfo, - builder: Arc, + // TODO: Two below `pubs` are a hack. 
+ pub info: CanisterInfo, + pub builder: Arc, output: RefCell>, } unsafe impl Send for Canister {} @@ -756,9 +757,12 @@ impl CanisterPool { } /// Build all canisters, returning a vector of results of each builds. + /// + /// TODO: `log` can be got from `env`, can't it? #[context("Failed while trying to build all canisters in the canister pool.")] pub fn build( &self, + env: &dyn Environment, log: &Logger, build_config: &BuildConfig, ) -> DfxResult>> { @@ -798,6 +802,10 @@ impl CanisterPool { trace!(log, "Not building canister '{}'.", canister.get_name()); continue; } + if !canister.builder.should_build(self, &canister.info, env.get_cache().as_ref())? { + continue; + } + result.push( self.step_prebuild(build_config, canister) .map_err(|e| { @@ -839,10 +847,12 @@ impl CanisterPool { /// Build all canisters, failing with the first that failed the build. Will return /// nothing if all succeeded. + /// + /// TODO: `log` can be got from `env`, can't it? #[context("Failed while trying to build all canisters.")] - pub async fn build_or_fail(&self, log: &Logger, build_config: &BuildConfig) -> DfxResult<()> { + pub async fn build_or_fail(&self, env: &dyn Environment, log: &Logger, build_config: &BuildConfig) -> DfxResult<()> { self.download(build_config).await?; - let outputs = self.build(log, build_config)?; + let outputs = self.build(env, log, build_config)?; for output in outputs { output.map_err(DfxError::new)?; diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 21060c8cf3..b7a0eb82b5 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -97,6 +97,8 @@ pub async fn deploy_canisters( .collect(), }; + // TODO: `CanisterPool::load` is called at least two times (second time by `build_canisters`). 
+ let canister_pool = CanisterPool::load(env, false, canisters_to_build.as_slice())?; let canisters_to_install: Vec = canisters_to_build .clone() .into_iter() @@ -106,6 +108,13 @@ pub async fn deploy_canisters( // TODO: This is a hack. config.get_config().get_canister_config(canister_name).map_or( true, |canister_config| canister_config.deploy))) + .filter(|canister_name| + if let Some(canister) = canister_pool.get_first_canister_with_name(canister_name) { + canister.builder.should_build(&canister_pool, &canister.info, env.get_cache().as_ref()).unwrap() // FIXME: `unwrap()` + } else { + false + } + ) .collect(); if some_canister.is_some() { @@ -296,7 +305,7 @@ async fn build_canisters( BuildConfig::from_config(config, env.get_network_descriptor().is_playground())? .with_canisters_to_build(canisters_to_build.into()) .with_env_file(env_file); - canister_pool.build_or_fail(log, &build_config).await?; + canister_pool.build_or_fail(env, log, &build_config).await?; Ok(canister_pool) } From a16fcd69bc1b3a72b1f84f08c157e2ebd6e8066e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 01:30:37 +0300 Subject: [PATCH 132/354] small refactor --- src/dfx/src/lib/builders/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index ee5e979ac8..636f3ce7e1 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -239,7 +239,7 @@ pub trait CanisterBuilder { let rev_id_map: BTreeMap = pool .get_canister_list() .iter() - .map(|c| (c.canister_id().to_text(), c.get_name().to_string())) + .map(|&c| (c.canister_id().to_text(), c.get_name().to_string())) .collect(); if canister_info.is_motoko() { // hack From 964ee12c53fad0bba35a9337e4f43dd7d9611ae5 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 01:45:32 +0300 Subject: [PATCH 133/354] bug fix --- .../operations/canister/deploy_canisters.rs | 43 ++++++++++--------- 1 file changed, 22 
insertions(+), 21 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index b7a0eb82b5..2a974f9cdc 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -97,6 +97,28 @@ pub async fn deploy_canisters( .collect(), }; + if canisters_to_deploy + .iter() + .any(|canister| initial_canister_id_store.find(canister).is_none()) + { + register_canisters( + env, + &canisters_to_deploy, + &initial_canister_id_store, + with_cycles, + specified_id_from_cli, + call_sender, + no_wallet, + from_subaccount, + created_at_time, + &config, + subnet_selection, + ) + .await?; + } else { + info!(env.get_logger(), "All canisters have already been created."); + } + // TODO: `CanisterPool::load` is called at least two times (second time by `build_canisters`). let canister_pool = CanisterPool::load(env, false, canisters_to_build.as_slice())?; let canisters_to_install: Vec = canisters_to_build @@ -122,27 +144,6 @@ pub async fn deploy_canisters( } else { info!(log, "Deploying all canisters."); } - if canisters_to_deploy - .iter() - .any(|canister| initial_canister_id_store.find(canister).is_none()) - { - register_canisters( - env, - &canisters_to_deploy, - &initial_canister_id_store, - with_cycles, - specified_id_from_cli, - call_sender, - no_wallet, - from_subaccount, - created_at_time, - &config, - subnet_selection, - ) - .await?; - } else { - info!(env.get_logger(), "All canisters have already been created."); - } let canisters_to_load = all_project_canisters_with_ids(env, &config); From 727b5102d344de8190ac00e428d26fae2baacb86 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 01:54:43 +0300 Subject: [PATCH 134/354] refactor --- src/dfx/src/lib/models/canister.rs | 35 +++++++++++++++++++----------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs 
b/src/dfx/src/lib/models/canister.rs index 614c3220ed..565de57ffb 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -633,6 +633,7 @@ impl CanisterPool { panic!("programming error"); } }; + println!("child_name: {}", child_name); let child_canister = self.get_first_canister_with_name(&child_name).unwrap().canister_id(); let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); @@ -756,20 +757,12 @@ impl CanisterPool { Ok(()) } - /// Build all canisters, returning a vector of results of each builds. - /// - /// TODO: `log` can be got from `env`, can't it? - #[context("Failed while trying to build all canisters in the canister pool.")] - pub fn build( + fn build_order( &self, env: &dyn Environment, - log: &Logger, build_config: &BuildConfig, - ) -> DfxResult>> { - self.step_prebuild_all(log, build_config) - .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; - - trace!(log, "Building dependencies graph."); + ) -> DfxResult> { + trace!(env.get_logger(), "Building dependencies graph."); let graph = self.build_dependencies_graph(build_config.canisters_to_build.clone())?; // TODO: Can `clone` be eliminated? let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { @@ -781,11 +774,27 @@ impl CanisterPool { }; BuildError::DependencyError(format!("Found circular dependency: {}", message)) })?; - let order: Vec = nodes + Ok(nodes .iter() .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. .map(|idx| *graph.node_weight(*idx).unwrap()) - .collect(); + .collect()) + } + + /// Build all canisters, returning a vector of results of each builds. + /// + /// TODO: `log` can be got from `env`, can't it? 
+ #[context("Failed while trying to build all canisters in the canister pool.")] + pub fn build( + &self, + env: &dyn Environment, + log: &Logger, + build_config: &BuildConfig, + ) -> DfxResult>> { + self.step_prebuild_all(log, build_config) + .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; + + let order = self.build_order(env, build_config)?; // TODO: The next line is slow and confusing code. let canisters_to_build: Vec<&Arc> = self.canisters.iter().filter(|c| order.contains(&c.canister_id())).collect(); From 3535510ef4e4467fd928f74d25f2e801e44a9eee Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 16:13:39 +0300 Subject: [PATCH 135/354] bug fix --- src/dfx/src/lib/graph/traverse_filtered.rs | 27 ++++++++++ src/dfx/src/lib/models/canister.rs | 22 ++++---- .../operations/canister/deploy_canisters.rs | 54 ++++++++++--------- 3 files changed, 70 insertions(+), 33 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 24e7ce0ead..d0cb2ff946 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -2,6 +2,8 @@ use std::iter::once; use petgraph::{data::DataMap, visit::{Bfs, IntoNeighbors, VisitMap}}; +use crate::lib::error::DfxResult; + pub struct BfsFiltered { base: Bfs, // node_filter: P, @@ -14,6 +16,7 @@ impl BfsFiltered { } } + #[allow(unused)] pub fn traverse(&mut self, graph: G, mut predicate: P, mut call: C) where C: FnMut(&NodeId, &NodeId) -> (), G: IntoNeighbors + DataMap, @@ -33,4 +36,28 @@ impl BfsFiltered { } } } + + pub fn traverse2(&mut self, graph: G, mut predicate: P, mut call: C) -> DfxResult<()> + where C: FnMut(&NodeId, &NodeId) -> DfxResult<()>, + G: IntoNeighbors + DataMap, + P: FnMut(&NodeId) -> DfxResult, + NodeId: Copy + Eq, + VM: VisitMap, + { + if let Some(first_id) = self.base.next(graph) { + while let Some(source_child_id) = &self.base.next(graph) { + if (&mut 
predicate)(source_child_id)? { + // Requested to document the next line behavior in https://github.com/petgraph/petgraph/issues/634 + let source_parent_id = self.base.stack.iter().map(|e| *e).chain(once(first_id)) + .filter_map(|x| (&mut predicate)(&x) + .map_or_else(|e| Some(Err(e)), |v| if v { Some(Ok(x)) } else { None })) + .next().transpose()?; + if let Some(source_parent_id) = &source_parent_id { + (&mut call)(source_parent_id, &source_child_id)?; + } + } + } + } + Ok(()) + } } \ No newline at end of file diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 565de57ffb..890e98b80c 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -606,14 +606,14 @@ impl CanisterPool { let bfs = Bfs::new(&source_graph, start_node); let mut filtered_bfs = BfsFiltered::new(bfs); - filtered_bfs.traverse( + filtered_bfs.traverse2( source_graph, |&s| { let source_id = source_graph.node_weight(s); if let Some(Import::Canister(_)) = source_id { - true + Ok(true) } else { - false + Ok(false) } }, |&source_parent_id, &source_child_id| { @@ -634,13 +634,17 @@ impl CanisterPool { } }; println!("child_name: {}", child_name); - let child_canister = self.get_first_canister_with_name(&child_name).unwrap().canister_id(); + let child_canister = self.get_first_canister_with_name(&child_name) + .ok_or_else(|| anyhow!("A canister with the name '{}' was not found in the current project.", child_name.clone()))? 
+ .canister_id(); let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); let dest_child_id = *dest_id_set.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); dest_graph.update_edge(dest_parent_id, dest_child_id, ()); + + Ok(()) } - ); + )?; } Ok(dest_graph) @@ -757,13 +761,13 @@ impl CanisterPool { Ok(()) } - fn build_order( + pub fn build_order( &self, env: &dyn Environment, - build_config: &BuildConfig, + canisters_to_build: &Option>, ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); - let graph = self.build_dependencies_graph(build_config.canisters_to_build.clone())?; // TODO: Can `clone` be eliminated? + let graph = self.build_dependencies_graph(canisters_to_build.clone())?; // TODO: Can `clone` be eliminated? let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { Some(canister_id) => match self.get_canister_info(canister_id) { @@ -794,7 +798,7 @@ impl CanisterPool { self.step_prebuild_all(log, build_config) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; - let order = self.build_order(env, build_config)?; + let order = self.build_order(env, &build_config.canisters_to_build.clone())?; // TODO: Eliminate `clone`.` // TODO: The next line is slow and confusing code. 
let canisters_to_build: Vec<&Arc> = self.canisters.iter().filter(|c| order.contains(&c.canister_id())).collect(); diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 2a974f9cdc..9a5b7102cb 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -97,31 +97,16 @@ pub async fn deploy_canisters( .collect(), }; - if canisters_to_deploy - .iter() - .any(|canister| initial_canister_id_store.find(canister).is_none()) - { - register_canisters( - env, - &canisters_to_deploy, - &initial_canister_id_store, - with_cycles, - specified_id_from_cli, - call_sender, - no_wallet, - from_subaccount, - created_at_time, - &config, - subnet_selection, - ) - .await?; - } else { - info!(env.get_logger(), "All canisters have already been created."); - } + // TODO: `CanisterPool::load` is called at least three times (including by `build_canisters`). + let preliminary_canister_pool = CanisterPool::load(env, false, canisters_to_build.as_slice())?; + + let order = preliminary_canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // FIXME: `Some` here is a hack. // TODO: Eliminate `clone`. + let order_names: Vec = order.iter() + .map(|canister| preliminary_canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); // FIXME: Is `unwrap` here correct? - // TODO: `CanisterPool::load` is called at least two times (second time by `build_canisters`). - let canister_pool = CanisterPool::load(env, false, canisters_to_build.as_slice())?; - let canisters_to_install: Vec = canisters_to_build + let canister_pool = CanisterPool::load(env, true, order_names.as_slice())?; // TODO: Is here `true` needed? 
+ + let canisters_to_install: &Vec = &canisters_to_build .clone() .into_iter() .filter(|canister_name| @@ -144,6 +129,27 @@ pub async fn deploy_canisters( } else { info!(log, "Deploying all canisters."); } + if canisters_to_install + .iter() + .any(|canister| canister_pool.get_first_canister_with_name(canister).is_none()) + { + register_canisters( + env, + &canisters_to_deploy, + &initial_canister_id_store, + with_cycles, + specified_id_from_cli, + call_sender, + no_wallet, + from_subaccount, + created_at_time, + &config, + subnet_selection, + ) + .await?; + } else { + info!(env.get_logger(), "All canisters have already been created."); + } let canisters_to_load = all_project_canisters_with_ids(env, &config); From 3104ff6f46f5f3fcb8df322dbdf914abcc2492bb Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 16:14:33 +0300 Subject: [PATCH 136/354] removed tracing --- src/dfx/src/lib/models/canister.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 890e98b80c..dc6b04ade1 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -633,7 +633,6 @@ impl CanisterPool { panic!("programming error"); } }; - println!("child_name: {}", child_name); let child_canister = self.get_first_canister_with_name(&child_name) .ok_or_else(|| anyhow!("A canister with the name '{}' was not found in the current project.", child_name.clone()))? 
.canister_id(); From a07d9984838d6b9f010f549802fca5cbacc3e46b Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 17:13:30 +0300 Subject: [PATCH 137/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 6 +++++- src/dfx/src/lib/models/canister.rs | 14 +++++++++----- .../lib/operations/canister/deploy_canisters.rs | 10 +++++----- 3 files changed, 19 insertions(+), 11 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 6a49bdb359..db7c28f4ba 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -107,12 +107,16 @@ impl CanisterBuilder for MotokoBuilder { info: &CanisterInfo, ) -> DfxResult> { add_imports(self.cache.as_ref(), info, &mut *pool.imports.borrow_mut(), pool)?; + // TODO: In some reason, the following line is needed only for `deploy`, not for `build`. + let graph = &pool.imports.borrow().graph; match petgraph::algo::toposort(&pool.imports.borrow().graph, None) { Ok(order) => { Ok(order.into_iter().filter_map(|id| match graph.node_weight(id) { - Some(Import::Canister(name)) => pool.get_first_canister_with_name(name.as_str()), // TODO: a little inefficient + Some(Import::Canister(name)) => { + pool.get_first_canister_with_name(name.as_str()) // TODO: a little inefficient + } _ => None, }).map(|canister| canister.canister_id()).collect()) } diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index dc6b04ade1..ec535c21d3 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -560,8 +560,16 @@ impl CanisterPool { /// Build only dependencies relevant for `canisters_to_build`. 
#[context("Failed to build dependencies graph for canister pool.")] - fn build_dependencies_graph(&self, canisters_to_build: Option>) -> DfxResult> { + fn build_dependencies_graph( + &self, + canisters_to_build: Option> + ) -> DfxResult> { // println!("canisters_to_build: {:?}", canisters_to_build); + let real_canisters_to_build: Vec<_> = match canisters_to_build { + Some(ref canisters_to_build) => canisters_to_build.clone(), // TODO: Remove `clone()` + None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), + }; + for canister in &self.canisters { // a little inefficient let contains = if let Some(canisters_to_build) = &canisters_to_build { canisters_to_build.iter().contains(&canister.get_info().get_name().to_string()) @@ -575,10 +583,6 @@ impl CanisterPool { } } - let real_canisters_to_build: Vec<_> = match canisters_to_build { - Some(canisters_to_build) => canisters_to_build, - None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), - }; let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 9a5b7102cb..40353f7e7d 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -98,13 +98,13 @@ pub async fn deploy_canisters( }; // TODO: `CanisterPool::load` is called at least three times (including by `build_canisters`). - let preliminary_canister_pool = CanisterPool::load(env, false, canisters_to_build.as_slice())?; + let canister_pool = CanisterPool::load(env, true, &canisters_to_deploy.as_slice())?; - let order = preliminary_canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // FIXME: `Some` here is a hack. // TODO: Eliminate `clone`. 
- let order_names: Vec = order.iter() - .map(|canister| preliminary_canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); // FIXME: Is `unwrap` here correct? + let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // FIXME: `Some` here is a hack. // TODO: Eliminate `clone`. + // let order_names: Vec = order.iter() + // .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); // FIXME: Is `unwrap` here correct? - let canister_pool = CanisterPool::load(env, true, order_names.as_slice())?; // TODO: Is here `true` needed? + // let canister_pool = CanisterPool::load(env, true, order_names.as_slice())?; // TODO: Is here `true` needed? let canisters_to_install: &Vec = &canisters_to_build .clone() From 7033768bbce75fa02f3fde9e383c862f5f8bc3da Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 17:23:53 +0300 Subject: [PATCH 138/354] bug fix --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 40353f7e7d..2e93123de5 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -29,6 +29,8 @@ use slog::info; use std::convert::TryFrom; use std::path::{Path, PathBuf}; +use super::add_canisters_with_ids; + #[derive(Eq, PartialEq, Debug, Clone)] pub enum DeployMode { NormalDeploy, @@ -97,8 +99,12 @@ pub async fn deploy_canisters( .collect(), }; + let required_canisters = config + .get_config() + .get_canister_names_with_dependencies(some_canister.as_deref())?; + let canisters_to_load = add_canisters_with_ids(&required_canisters, env, &config); // TODO: `CanisterPool::load` is called at least three times (including by `build_canisters`). 
- let canister_pool = CanisterPool::load(env, true, &canisters_to_deploy.as_slice())?; + let canister_pool = CanisterPool::load(env, true, &canisters_to_load)?; let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // FIXME: `Some` here is a hack. // TODO: Eliminate `clone`. // let order_names: Vec = order.iter() From 9a70bbefec3dd4393861901c81c856cdf155af5e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 17:42:53 +0300 Subject: [PATCH 139/354] removed commented out code --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 2e93123de5..e438850573 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -110,8 +110,6 @@ pub async fn deploy_canisters( // let order_names: Vec = order.iter() // .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); // FIXME: Is `unwrap` here correct? - // let canister_pool = CanisterPool::load(env, true, order_names.as_slice())?; // TODO: Is here `true` needed? 
- let canisters_to_install: &Vec = &canisters_to_build .clone() .into_iter() From a62f989f3fa59fff5bc194abecdb7120226de3d7 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 20:17:37 +0300 Subject: [PATCH 140/354] comment typo --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index ec535c21d3..12dc13ee27 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -801,7 +801,7 @@ impl CanisterPool { self.step_prebuild_all(log, build_config) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; - let order = self.build_order(env, &build_config.canisters_to_build.clone())?; // TODO: Eliminate `clone`.` + let order = self.build_order(env, &build_config.canisters_to_build.clone())?; // TODO: Eliminate `clone`. // TODO: The next line is slow and confusing code. let canisters_to_build: Vec<&Arc> = self.canisters.iter().filter(|c| order.contains(&c.canister_id())).collect(); From bc800c1ca496500d867585e435209346a990e90e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 20:17:59 +0300 Subject: [PATCH 141/354] bug fix --- .../operations/canister/deploy_canisters.rs | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index e438850573..75923e9443 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -79,6 +79,12 @@ pub async fn deploy_canisters( let canisters_to_deploy = canister_with_dependencies(&config, some_canister)?; + let required_canisters = config + .get_config() + .get_canister_names_with_dependencies(some_canister.as_deref())?; + let canisters_to_load = add_canisters_with_ids(&required_canisters, env, &config); + let 
canister_pool = CanisterPool::load(env, true, &canisters_to_load)?; + let canisters_to_build = match deploy_mode { PrepareForProposal(canister_name) | ComputeEvidence(canister_name) => { vec![canister_name.clone()] @@ -99,16 +105,10 @@ pub async fn deploy_canisters( .collect(), }; - let required_canisters = config - .get_config() - .get_canister_names_with_dependencies(some_canister.as_deref())?; - let canisters_to_load = add_canisters_with_ids(&required_canisters, env, &config); - // TODO: `CanisterPool::load` is called at least three times (including by `build_canisters`). - let canister_pool = CanisterPool::load(env, true, &canisters_to_load)?; - + // FIXME: `build_order` is called two times during deployment of a new canister. let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // FIXME: `Some` here is a hack. // TODO: Eliminate `clone`. - // let order_names: Vec = order.iter() - // .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); // FIXME: Is `unwrap` here correct? + let order_names: Vec = order.iter() + .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); // FIXME: Is `unwrap` here correct? 
let canisters_to_install: &Vec = &canisters_to_build .clone() @@ -135,11 +135,11 @@ pub async fn deploy_canisters( } if canisters_to_install .iter() - .any(|canister| canister_pool.get_first_canister_with_name(canister).is_none()) + .any(|canister| initial_canister_id_store.find(canister).is_none()) { register_canisters( env, - &canisters_to_deploy, + &order_names, &initial_canister_id_store, with_cycles, specified_id_from_cli, @@ -155,12 +155,12 @@ pub async fn deploy_canisters( info!(env.get_logger(), "All canisters have already been created."); } - let canisters_to_load = all_project_canisters_with_ids(env, &config); + // let canisters_to_load = all_project_canisters_with_ids(env, &config); let pool = build_canisters( env, - &canisters_to_load, - &canisters_to_build, + &order_names, + &order_names, &config, env_file.clone(), ) From 174eaf68370950c4e9aaf91a13ab37c5332d88ec Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 20:18:22 +0300 Subject: [PATCH 142/354] code formatting --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index db7c28f4ba..7b674c054a 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -108,7 +108,6 @@ impl CanisterBuilder for MotokoBuilder { ) -> DfxResult> { add_imports(self.cache.as_ref(), info, &mut *pool.imports.borrow_mut(), pool)?; // TODO: In some reason, the following line is needed only for `deploy`, not for `build`. 
- let graph = &pool.imports.borrow().graph; match petgraph::algo::toposort(&pool.imports.borrow().graph, None) { From 8792959b10968d3416fb9f7fcbe0e4ee9271286d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 22:57:14 +0300 Subject: [PATCH 143/354] fixes --- src/dfx/src/lib/graph/traverse_filtered.rs | 54 +++++++------------ .../operations/canister/deploy_canisters.rs | 2 +- 2 files changed, 20 insertions(+), 36 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index d0cb2ff946..cf99a3c520 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,6 +1,4 @@ -use std::iter::once; - -use petgraph::{data::DataMap, visit::{Bfs, IntoNeighbors, VisitMap}}; +use petgraph::{data::DataMap, visit::{Bfs, IntoNeighborsDirected, VisitMap}, Direction::Incoming}; use crate::lib::error::DfxResult; @@ -16,44 +14,30 @@ impl BfsFiltered { } } - #[allow(unused)] - pub fn traverse(&mut self, graph: G, mut predicate: P, mut call: C) - where C: FnMut(&NodeId, &NodeId) -> (), - G: IntoNeighbors + DataMap, - P: FnMut(&NodeId) -> bool, - NodeId: Copy + Eq, - VM: VisitMap, - { - if let Some(first_id) = self.base.next(graph) { - while let Some(source_child_id) = &self.base.next(graph) { - if (&mut predicate)(source_child_id) { - // Requested to document the next line behavior in https://github.com/petgraph/petgraph/issues/634 - let source_parent_id = self.base.stack.iter().map(|e| *e).chain(once(first_id)).find(&mut predicate); - if let Some(source_parent_id) = &source_parent_id { - (&mut call)(source_parent_id, &source_child_id); - } - } - } - } - } - + /// TODO: Refactor: Extract `iter` function from here. 
pub fn traverse2(&mut self, graph: G, mut predicate: P, mut call: C) -> DfxResult<()> where C: FnMut(&NodeId, &NodeId) -> DfxResult<()>, - G: IntoNeighbors + DataMap, + G: IntoNeighborsDirected + DataMap, P: FnMut(&NodeId) -> DfxResult, NodeId: Copy + Eq, VM: VisitMap, { - if let Some(first_id) = self.base.next(graph) { - while let Some(source_child_id) = &self.base.next(graph) { - if (&mut predicate)(source_child_id)? { - // Requested to document the next line behavior in https://github.com/petgraph/petgraph/issues/634 - let source_parent_id = self.base.stack.iter().map(|e| *e).chain(once(first_id)) - .filter_map(|x| (&mut predicate)(&x) - .map_or_else(|e| Some(Err(e)), |v| if v { Some(Ok(x)) } else { None })) - .next().transpose()?; - if let Some(source_parent_id) = &source_parent_id { - (&mut call)(source_parent_id, &source_child_id)?; + while let Some(source_child_id) = &self.base.next(graph) { + if (&mut predicate)(source_child_id)? { + let mut source_parent_iter = graph.neighbors_directed(*source_child_id, Incoming); + let mut source_parent_id; + if let Some(id1) = source_parent_iter.next() { + source_parent_id = id1; + loop { + if (&mut predicate)(&source_parent_id)? 
{ + (&mut call)(&source_parent_id, &source_child_id)?; + break; + } + if let Some(id2) = source_parent_iter.next() { + source_parent_id = id2; + } else { + break; + } } } } diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 75923e9443..9d394477b1 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -11,7 +11,7 @@ use crate::lib::operations::canister::deploy_canisters::DeployMode::{ }; use crate::lib::operations::canister::motoko_playground::reserve_canister_with_playground; use crate::lib::operations::canister::{ - all_project_canisters_with_ids, create_canister, install_canister::install_canister, + create_canister, install_canister::install_canister, }; use crate::util::clap::subnet_selection_opt::SubnetSelectionType; use anyhow::{anyhow, bail, Context}; From 88f5d984629ae53043ca9bbc684919bf8552fb1d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 22 Apr 2024 23:06:29 +0300 Subject: [PATCH 144/354] comment --- src/dfx/src/lib/graph/traverse_filtered.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index cf99a3c520..caae7cf605 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,3 +1,4 @@ +// TODO: Somebody, adopt this code (and DFS) to `petgraph`. 
use petgraph::{data::DataMap, visit::{Bfs, IntoNeighborsDirected, VisitMap}, Direction::Incoming}; use crate::lib::error::DfxResult; From 88e7b64f35e8da26d3b4a239f2c7af429cd2a25e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 23 Apr 2024 00:13:43 +0300 Subject: [PATCH 145/354] misc --- src/dfx/src/lib/builders/mod.rs | 5 ++++- src/dfx/src/lib/builders/motoko.rs | 2 +- src/dfx/src/lib/models/canister.rs | 1 + .../operations/canister/deploy_canisters.rs | 19 +++++++++++-------- 4 files changed, 17 insertions(+), 10 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 636f3ce7e1..cfa6d69a53 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -260,9 +260,12 @@ pub trait CanisterBuilder { // node_index }; let mut import_iter = Bfs::new(&imports.graph, start); + println!("START {:?}", output_wasm_path); loop { if let Some(import) = import_iter.next(&imports.graph) { - let imported_file = match &imports.graph[import] { + let subnode = &imports.graph[import]; + println!("NEXT {:?}", subnode); + let imported_file = match subnode { Import::Canister(canister_name) => { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 7b674c054a..0b0ec0270f 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -60,7 +60,7 @@ pub fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut Imports if let Some(_) = imports.nodes.get(&parent) { // The item is already in the graph. 
return Ok(()); } else { - imports.nodes.insert(parent.clone(), imports.graph.add_node(parent.clone()),); + imports.nodes.insert(parent.clone(), imports.graph.add_node(parent.clone())); } let mut command = cache.get_binary_command("moc")?; diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 12dc13ee27..2e8ee5f61a 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -640,6 +640,7 @@ impl CanisterPool { let child_canister = self.get_first_canister_with_name(&child_name) .ok_or_else(|| anyhow!("A canister with the name '{}' was not found in the current project.", child_name.clone()))? .canister_id(); + println!("ZZZ: {:?} => {:?}", parent, child); let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); let dest_child_id = *dest_id_set.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 9d394477b1..79e98e101d 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -157,12 +157,14 @@ pub async fn deploy_canisters( // let canisters_to_load = all_project_canisters_with_ids(env, &config); - let pool = build_canisters( + // let pool = canister_pool; // TODO + build_canisters( env, - &order_names, + // &order_names, &order_names, &config, env_file.clone(), + &canister_pool, ) .await?; @@ -179,7 +181,7 @@ pub async fn deploy_canisters( force_reinstall, upgrade_unchanged, call_sender, - pool, + canister_pool, skip_consent, env_file.as_deref(), no_asset_upgrade, @@ -302,22 +304,23 @@ async fn register_canisters( #[context("Failed to build all canisters.")] async fn build_canisters( env: &dyn Environment, - canisters_to_load: &[String], + // canisters_to_load: &[String], canisters_to_build: &[String], config: 
&Config, env_file: Option, -) -> DfxResult { + canister_pool: &CanisterPool, +) -> DfxResult<()> { let log = env.get_logger(); info!(log, "Building canisters..."); - let build_mode_check = false; - let canister_pool = CanisterPool::load(env, build_mode_check, canisters_to_load)?; + // let build_mode_check = false; + // let canister_pool = CanisterPool::load(env, build_mode_check, canisters_to_load)?; let build_config = BuildConfig::from_config(config, env.get_network_descriptor().is_playground())? .with_canisters_to_build(canisters_to_build.into()) .with_env_file(env_file); canister_pool.build_or_fail(env, log, &build_config).await?; - Ok(canister_pool) + Ok(()) } #[context("Failed while trying to install all canisters.")] From a8f18a24b5bce6f65432ff4b650969c0566cf2a2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Tue, 23 Apr 2024 18:32:40 +0300 Subject: [PATCH 146/354] removed tracing --- src/dfx/src/lib/builders/mod.rs | 2 -- src/dfx/src/lib/models/canister.rs | 1 - 2 files changed, 3 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index cfa6d69a53..dacf6a7f38 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -260,11 +260,9 @@ pub trait CanisterBuilder { // node_index }; let mut import_iter = Bfs::new(&imports.graph, start); - println!("START {:?}", output_wasm_path); loop { if let Some(import) = import_iter.next(&imports.graph) { let subnode = &imports.graph[import]; - println!("NEXT {:?}", subnode); let imported_file = match subnode { Import::Canister(canister_name) => { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 2e8ee5f61a..12dc13ee27 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -640,7 +640,6 @@ impl CanisterPool { let child_canister = self.get_first_canister_with_name(&child_name) 
.ok_or_else(|| anyhow!("A canister with the name '{}' was not found in the current project.", child_name.clone()))? .canister_id(); - println!("ZZZ: {:?} => {:?}", parent, child); let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); let dest_child_id = *dest_id_set.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); From 3ce3ddc751075f56e4e39fd025a49bfaba379e4a Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 00:46:45 +0300 Subject: [PATCH 147/354] started to work on automated testing --- e2e/assets/make_like/dependency.mo | 5 + e2e/assets/make_like/dependent.mo | 5 + e2e/assets/make_like/dfx.json | 19 +++ e2e/tests-dfx/make_like.bash | 237 +++++++++++++++++++++++++++++ 4 files changed, 266 insertions(+) create mode 100644 e2e/assets/make_like/dependency.mo create mode 100644 e2e/assets/make_like/dependent.mo create mode 100644 e2e/assets/make_like/dfx.json create mode 100644 e2e/tests-dfx/make_like.bash diff --git a/e2e/assets/make_like/dependency.mo b/e2e/assets/make_like/dependency.mo new file mode 100644 index 0000000000..99b0f43d65 --- /dev/null +++ b/e2e/assets/make_like/dependency.mo @@ -0,0 +1,5 @@ +actor { + public query func greet(name: Text) : async Text { + return "Hello, " # name # "!"; + } +} \ No newline at end of file diff --git a/e2e/assets/make_like/dependent.mo b/e2e/assets/make_like/dependent.mo new file mode 100644 index 0000000000..887f0c5e42 --- /dev/null +++ b/e2e/assets/make_like/dependent.mo @@ -0,0 +1,5 @@ +actor { + public func greet(name : Text) : async Text { + return "Hello, " # name # "!"; + }; +}; diff --git a/e2e/assets/make_like/dfx.json b/e2e/assets/make_like/dfx.json new file mode 100644 index 0000000000..31c3b730b3 --- /dev/null +++ b/e2e/assets/make_like/dfx.json @@ -0,0 +1,19 @@ +{ + "version": 1, + "canisters": { + "dependency": { + "main": "dependency.mo" + }, + "dependent": { + "main": "dependent.mo", + 
"dependencies": [ + "dependency" + ] + } + }, + "networks": { + "local": { + "bind": "127.0.0.1:8000" + } + } +} \ No newline at end of file diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash new file mode 100644 index 0000000000..6e35841091 --- /dev/null +++ b/e2e/tests-dfx/make_like.bash @@ -0,0 +1,237 @@ +#!/usr/bin/env bats + +load ../utils/_ +# load ../utils/cycles-ledger + +setup() { + standard_setup + + install_asset make_like +} + +teardown() { + dfx_stop + + standard_teardown +} + +@test "trying to break dependency compiling" { + dfx_start + assert_command dfx deploy -vv dependent + assert_contains '"dependent.mo"' + assert_contains '"dependency.mo"' +} + +# @test "deploy --upgrade-unchanged upgrades even if the .wasm did not change" { +# dfx_start +# assert_command dfx deploy + +# assert_command dfx deploy +# assert_match "Module hash.*is already installed" + +# assert_command dfx deploy --upgrade-unchanged +# assert_not_match "Module hash.*is already installed" +# } + +# @test "deploy without --no-wallet sets wallet and self as the controllers" { +# dfx_start +# WALLET=$(dfx identity get-wallet) +# PRINCIPAL=$(dfx identity get-principal) +# assert_command dfx deploy hello_backend +# assert_command dfx canister info hello_backend +# assert_match "Controllers: ($WALLET $PRINCIPAL|$PRINCIPAL $WALLET)" +# } + +# @test "deploy --no-wallet sets only self as the controller" { +# dfx_start +# WALLET=$(dfx identity get-wallet) +# PRINCIPAL=$(dfx identity get-principal) +# assert_command dfx deploy hello_backend --no-wallet +# assert_command dfx canister info hello_backend +# assert_not_match "Controllers: ($WALLET $PRINCIPAL|$PRINCIPAL $WALLET)" +# assert_match "Controllers: $PRINCIPAL" +# } + +# @test "deploy from a subdirectory" { +# dfx_new hello +# dfx_start +# install_asset greet + +# ( +# cd src +# assert_command dfx deploy +# assert_match "Installing code for" +# ) + +# assert_command dfx canister call hello_backend greet '("Banzai")' 
+# assert_eq '("Hello, Banzai!")' + +# assert_command dfx deploy +# assert_not_match "Installing code for" +# assert_match "is already installed" +# } + +# @test "deploying multiple canisters with arguments fails" { +# assert_command_fail dfx deploy --argument hello +# assert_contains "The init argument can only be set when deploying a single canister." +# } + +# @test "deploy one canister with an argument" { +# dfx_start +# assert_command dfx deploy hello_backend --argument '()' +# } + +# @test "deploy one canister specifying raw argument" { +# dfx_start +# assert_command dfx deploy hello_backend --argument '4449444c0000' --argument-type raw +# } + +# @test "deploy with an argument in a file" { +# dfx_start +# TMPFILE="$(mktemp)" +# echo '()' >"$TMPFILE" +# assert_command dfx deploy hello_backend --argument-file "$TMPFILE" +# } + +# @test "deploying a dependent doesn't require already-installed dependencies to take args" { +# install_asset deploy_deps +# dfx_start +# assert_command dfx deploy dependency --argument '("dfx")' +# touch dependency.mo +# assert_command dfx deploy dependent +# assert_command dfx canister call dependency greet +# assert_match "Hello, dfx!" +# } + +# @test "deploy succeeds if init_arg is defined in dfx.json" { +# install_asset deploy_deps +# dfx_start +# jq '.canisters.dependency.init_arg="(\"dfx\")"' dfx.json | sponge dfx.json +# assert_command dfx deploy dependency +# assert_command dfx canister call dependency greet +# assert_match "Hello, dfx!" + +# assert_command dfx deploy dependency --mode reinstall --yes --argument '("icp")' +# assert_contains "Canister 'dependency' has init_arg in dfx.json: (\"dfx\")," +# assert_contains "which is different from the one specified in the command line: (\"icp\")." +# assert_contains "The command line value will be used." +# assert_command dfx canister call dependency greet +# assert_match "Hello, icp!" 
+# } + +# @test "reinstalling a single Motoko canister with imported dependency works" { +# install_asset import_canister +# dfx_start +# assert_command dfx deploy +# assert_command dfx deploy importer --mode reinstall --yes +# } + +# @test "deploy succeeds with --specified-id" { +# dfx_start +# assert_command dfx deploy hello_backend --specified-id n5n4y-3aaaa-aaaaa-p777q-cai +# assert_command dfx canister id hello_backend +# assert_match n5n4y-3aaaa-aaaaa-p777q-cai +# } + +# @test "deploy fails if --specified-id without canister_name" { +# dfx_start +# assert_command_fail dfx deploy --specified-id n5n4y-3aaaa-aaaaa-p777q-cai +# assert_match \ +# "error: the following required arguments were not provided: +# " +# } + +# @test "deploy succeeds when specify canister ID in dfx.json" { +# dfx_start +# jq '.canisters.hello_backend.specified_id="n5n4y-3aaaa-aaaaa-p777q-cai"' dfx.json | sponge dfx.json +# assert_command dfx deploy hello_backend +# assert_command dfx canister id hello_backend +# assert_match n5n4y-3aaaa-aaaaa-p777q-cai +# } + +# @test "deploy succeeds when specify canister ID both in dfx.json and cli; warning if different; cli value takes effect" { +# dfx_start +# jq '.canisters.hello_backend.specified_id="n5n4y-3aaaa-aaaaa-p777q-cai"' dfx.json | sponge dfx.json +# assert_command dfx deploy hello_backend --specified-id hhn2s-5l777-77777-7777q-cai +# assert_contains "WARN: Canister 'hello_backend' has a specified ID in dfx.json: n5n4y-3aaaa-aaaaa-p777q-cai," +# assert_contains "which is different from the one specified in the command line: hhn2s-5l777-77777-7777q-cai." +# assert_contains "The command line value will be used." 
+ +# assert_command dfx canister id hello_backend +# assert_match hhn2s-5l777-77777-7777q-cai +# } + +# @test "deploy does not require wallet if all canisters are created" { +# dfx_start +# dfx canister create --all --no-wallet +# assert_command dfx deploy +# assert_not_contains "Creating a wallet canister" +# assert_command dfx identity get-wallet +# assert_contains "Creating a wallet canister" +# } + +# @test "can deploy gzip wasm" { +# jq '.canisters.hello_backend.gzip=true' dfx.json | sponge dfx.json +# dfx_start +# assert_command dfx deploy +# BUILD_HASH="0x$(sha256sum .dfx/local/canisters/hello_backend/hello_backend.wasm.gz | cut -d " " -f 1)" +# ONCHAIN_HASH="$(dfx canister info hello_backend | tail -n 1 | cut -d " " -f 3)" +# assert_eq "$BUILD_HASH" "$ONCHAIN_HASH" +# } + +# @test "prints the frontend url after deploy" { +# dfx_new_frontend hello +# dfx_start +# assert_command dfx deploy +# frontend_id=$(dfx canister id hello_frontend) +# assert_match "http://127.0.0.1.+${frontend_id}" +# assert_match "${frontend_id}.localhost" +# } + +# @test "prints the frontend url if 'frontend' section is not present in dfx.json" { +# dfx_new_frontend hello +# jq 'del(.canisters.hello_frontend.frontend)' dfx.json | sponge dfx.json +# dfx_start +# assert_command dfx deploy +# frontend_id=$(dfx canister id hello_frontend) +# assert_match "http://127.0.0.1.+${frontend_id}" +# assert_match "${frontend_id}.localhost" +# } + +# @test "prints the frontend url if the frontend section has been removed after initial deployment" { +# dfx_new_frontend hello +# dfx_start +# assert_command dfx deploy +# frontend_id=$(dfx canister id hello_frontend) +# assert_match "http://127.0.0.1.+${frontend_id}" +# assert_match "${frontend_id}.localhost" +# jq 'del(.canisters.hello_frontend.frontend)' dfx.json | sponge dfx.json +# assert_command dfx deploy +# assert_match "http://127.0.0.1.+${frontend_id}" +# assert_match "${frontend_id}.localhost" +# } + +# @test "subnet targetting" { +# # fake 
cmc setup +# cd .. +# dfx_new fake_cmc +# install_asset fake_cmc +# install_cycles_ledger_canisters +# dfx_start +# assert_command dfx deploy fake-cmc --specified-id "rkp4c-7iaaa-aaaaa-aaaca-cai" # CMC canister id +# cd ../hello + +# # use --subnet +# SUBNET_ID="5kdm2-62fc6-fwnja-hutkz-ycsnm-4z33i-woh43-4cenu-ev7mi-gii6t-4ae" # a random, valid principal +# assert_command dfx deploy hello_backend --subnet "$SUBNET_ID" +# cd ../fake_cmc +# assert_command dfx canister call fake-cmc last_create_canister_args +# assert_contains "subnet = principal \"$SUBNET_ID\";" + +# # use --subnet-type +# cd ../hello +# assert_command dfx deploy hello_frontend --subnet-type custom_subnet_type +# cd ../fake_cmc +# assert_command dfx canister call fake-cmc last_create_canister_args +# assert_contains 'subnet_type = opt "custom_subnet_type"' +# } From c1b6831df3734345c3bb8c0eb0d83d033e6daa4d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 00:51:53 +0300 Subject: [PATCH 148/354] test breaking dependency compiling --- e2e/tests-dfx/make_like.bash | 221 ++--------------------------------- 1 file changed, 8 insertions(+), 213 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 6e35841091..78c47a8be5 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -20,218 +20,13 @@ teardown() { assert_command dfx deploy -vv dependent assert_contains '"dependent.mo"' assert_contains '"dependency.mo"' -} - -# @test "deploy --upgrade-unchanged upgrades even if the .wasm did not change" { -# dfx_start -# assert_command dfx deploy - -# assert_command dfx deploy -# assert_match "Module hash.*is already installed" - -# assert_command dfx deploy --upgrade-unchanged -# assert_not_match "Module hash.*is already installed" -# } - -# @test "deploy without --no-wallet sets wallet and self as the controllers" { -# dfx_start -# WALLET=$(dfx identity get-wallet) -# PRINCIPAL=$(dfx identity get-principal) -# assert_command dfx deploy 
hello_backend -# assert_command dfx canister info hello_backend -# assert_match "Controllers: ($WALLET $PRINCIPAL|$PRINCIPAL $WALLET)" -# } - -# @test "deploy --no-wallet sets only self as the controller" { -# dfx_start -# WALLET=$(dfx identity get-wallet) -# PRINCIPAL=$(dfx identity get-principal) -# assert_command dfx deploy hello_backend --no-wallet -# assert_command dfx canister info hello_backend -# assert_not_match "Controllers: ($WALLET $PRINCIPAL|$PRINCIPAL $WALLET)" -# assert_match "Controllers: $PRINCIPAL" -# } - -# @test "deploy from a subdirectory" { -# dfx_new hello -# dfx_start -# install_asset greet - -# ( -# cd src -# assert_command dfx deploy -# assert_match "Installing code for" -# ) - -# assert_command dfx canister call hello_backend greet '("Banzai")' -# assert_eq '("Hello, Banzai!")' - -# assert_command dfx deploy -# assert_not_match "Installing code for" -# assert_match "is already installed" -# } - -# @test "deploying multiple canisters with arguments fails" { -# assert_command_fail dfx deploy --argument hello -# assert_contains "The init argument can only be set when deploying a single canister." -# } - -# @test "deploy one canister with an argument" { -# dfx_start -# assert_command dfx deploy hello_backend --argument '()' -# } - -# @test "deploy one canister specifying raw argument" { -# dfx_start -# assert_command dfx deploy hello_backend --argument '4449444c0000' --argument-type raw -# } - -# @test "deploy with an argument in a file" { -# dfx_start -# TMPFILE="$(mktemp)" -# echo '()' >"$TMPFILE" -# assert_command dfx deploy hello_backend --argument-file "$TMPFILE" -# } - -# @test "deploying a dependent doesn't require already-installed dependencies to take args" { -# install_asset deploy_deps -# dfx_start -# assert_command dfx deploy dependency --argument '("dfx")' -# touch dependency.mo -# assert_command dfx deploy dependent -# assert_command dfx canister call dependency greet -# assert_match "Hello, dfx!" 
-# } -# @test "deploy succeeds if init_arg is defined in dfx.json" { -# install_asset deploy_deps -# dfx_start -# jq '.canisters.dependency.init_arg="(\"dfx\")"' dfx.json | sponge dfx.json -# assert_command dfx deploy dependency -# assert_command dfx canister call dependency greet -# assert_match "Hello, dfx!" - -# assert_command dfx deploy dependency --mode reinstall --yes --argument '("icp")' -# assert_contains "Canister 'dependency' has init_arg in dfx.json: (\"dfx\")," -# assert_contains "which is different from the one specified in the command line: (\"icp\")." -# assert_contains "The command line value will be used." -# assert_command dfx canister call dependency greet -# assert_match "Hello, icp!" -# } - -# @test "reinstalling a single Motoko canister with imported dependency works" { -# install_asset import_canister -# dfx_start -# assert_command dfx deploy -# assert_command dfx deploy importer --mode reinstall --yes -# } - -# @test "deploy succeeds with --specified-id" { -# dfx_start -# assert_command dfx deploy hello_backend --specified-id n5n4y-3aaaa-aaaaa-p777q-cai -# assert_command dfx canister id hello_backend -# assert_match n5n4y-3aaaa-aaaaa-p777q-cai -# } - -# @test "deploy fails if --specified-id without canister_name" { -# dfx_start -# assert_command_fail dfx deploy --specified-id n5n4y-3aaaa-aaaaa-p777q-cai -# assert_match \ -# "error: the following required arguments were not provided: -# " -# } - -# @test "deploy succeeds when specify canister ID in dfx.json" { -# dfx_start -# jq '.canisters.hello_backend.specified_id="n5n4y-3aaaa-aaaaa-p777q-cai"' dfx.json | sponge dfx.json -# assert_command dfx deploy hello_backend -# assert_command dfx canister id hello_backend -# assert_match n5n4y-3aaaa-aaaaa-p777q-cai -# } - -# @test "deploy succeeds when specify canister ID both in dfx.json and cli; warning if different; cli value takes effect" { -# dfx_start -# jq '.canisters.hello_backend.specified_id="n5n4y-3aaaa-aaaaa-p777q-cai"' dfx.json | sponge 
dfx.json -# assert_command dfx deploy hello_backend --specified-id hhn2s-5l777-77777-7777q-cai -# assert_contains "WARN: Canister 'hello_backend' has a specified ID in dfx.json: n5n4y-3aaaa-aaaaa-p777q-cai," -# assert_contains "which is different from the one specified in the command line: hhn2s-5l777-77777-7777q-cai." -# assert_contains "The command line value will be used." - -# assert_command dfx canister id hello_backend -# assert_match hhn2s-5l777-77777-7777q-cai -# } - -# @test "deploy does not require wallet if all canisters are created" { -# dfx_start -# dfx canister create --all --no-wallet -# assert_command dfx deploy -# assert_not_contains "Creating a wallet canister" -# assert_command dfx identity get-wallet -# assert_contains "Creating a wallet canister" -# } - -# @test "can deploy gzip wasm" { -# jq '.canisters.hello_backend.gzip=true' dfx.json | sponge dfx.json -# dfx_start -# assert_command dfx deploy -# BUILD_HASH="0x$(sha256sum .dfx/local/canisters/hello_backend/hello_backend.wasm.gz | cut -d " " -f 1)" -# ONCHAIN_HASH="$(dfx canister info hello_backend | tail -n 1 | cut -d " " -f 3)" -# assert_eq "$BUILD_HASH" "$ONCHAIN_HASH" -# } - -# @test "prints the frontend url after deploy" { -# dfx_new_frontend hello -# dfx_start -# assert_command dfx deploy -# frontend_id=$(dfx canister id hello_frontend) -# assert_match "http://127.0.0.1.+${frontend_id}" -# assert_match "${frontend_id}.localhost" -# } - -# @test "prints the frontend url if 'frontend' section is not present in dfx.json" { -# dfx_new_frontend hello -# jq 'del(.canisters.hello_frontend.frontend)' dfx.json | sponge dfx.json -# dfx_start -# assert_command dfx deploy -# frontend_id=$(dfx canister id hello_frontend) -# assert_match "http://127.0.0.1.+${frontend_id}" -# assert_match "${frontend_id}.localhost" -# } - -# @test "prints the frontend url if the frontend section has been removed after initial deployment" { -# dfx_new_frontend hello -# dfx_start -# assert_command dfx deploy -# 
frontend_id=$(dfx canister id hello_frontend) -# assert_match "http://127.0.0.1.+${frontend_id}" -# assert_match "${frontend_id}.localhost" -# jq 'del(.canisters.hello_frontend.frontend)' dfx.json | sponge dfx.json -# assert_command dfx deploy -# assert_match "http://127.0.0.1.+${frontend_id}" -# assert_match "${frontend_id}.localhost" -# } - -# @test "subnet targetting" { -# # fake cmc setup -# cd .. -# dfx_new fake_cmc -# install_asset fake_cmc -# install_cycles_ledger_canisters -# dfx_start -# assert_command dfx deploy fake-cmc --specified-id "rkp4c-7iaaa-aaaaa-aaaca-cai" # CMC canister id -# cd ../hello + touch dependent.mo + assert_contains '"dependent.mo"' + assert_not_contains '"dependency.mo"' -# # use --subnet -# SUBNET_ID="5kdm2-62fc6-fwnja-hutkz-ycsnm-4z33i-woh43-4cenu-ev7mi-gii6t-4ae" # a random, valid principal -# assert_command dfx deploy hello_backend --subnet "$SUBNET_ID" -# cd ../fake_cmc -# assert_command dfx canister call fake-cmc last_create_canister_args -# assert_contains "subnet = principal \"$SUBNET_ID\";" - -# # use --subnet-type -# cd ../hello -# assert_command dfx deploy hello_frontend --subnet-type custom_subnet_type -# cd ../fake_cmc -# assert_command dfx canister call fake-cmc last_create_canister_args -# assert_contains 'subnet_type = opt "custom_subnet_type"' -# } + # TODO + # touch dependency.mo + # assert_contains '"dependent.mo"' + # assert_contains '"dependency.mo"' +} From 65e79f23abee4e99696c64459a34211b44c6d207 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 00:55:42 +0300 Subject: [PATCH 149/354] fix bug in tests --- e2e/tests-dfx/make_like.bash | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 78c47a8be5..2e1e34c580 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -22,11 +22,12 @@ teardown() { assert_contains '"dependency.mo"' touch dependent.mo + assert_command dfx deploy -vv 
dependent assert_contains '"dependent.mo"' assert_not_contains '"dependency.mo"' - # TODO - # touch dependency.mo - # assert_contains '"dependent.mo"' - # assert_contains '"dependency.mo"' + touch dependency.mo + assert_command dfx deploy -vv dependent + assert_contains '"dependent.mo"' + assert_contains '"dependency.mo"' } From 18a369d397fbff16e5b1510e2f3c4c1f370c2071 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 01:55:12 +0300 Subject: [PATCH 150/354] trying to rectify a test --- e2e/assets/make_like/dependency.mo | 2 +- e2e/assets/make_like/dependent.mo | 5 ++++- e2e/assets/make_like/lib.mo | 1 + e2e/tests-dfx/make_like.bash | 19 +++++++++++-------- 4 files changed, 17 insertions(+), 10 deletions(-) create mode 100644 e2e/assets/make_like/lib.mo diff --git a/e2e/assets/make_like/dependency.mo b/e2e/assets/make_like/dependency.mo index 99b0f43d65..9806b1fdec 100644 --- a/e2e/assets/make_like/dependency.mo +++ b/e2e/assets/make_like/dependency.mo @@ -1,5 +1,5 @@ actor { - public query func greet(name: Text) : async Text { + public shared func greet(name: Text) : async Text { return "Hello, " # name # "!"; } } \ No newline at end of file diff --git a/e2e/assets/make_like/dependent.mo b/e2e/assets/make_like/dependent.mo index 887f0c5e42..1aab542d75 100644 --- a/e2e/assets/make_like/dependent.mo +++ b/e2e/assets/make_like/dependent.mo @@ -1,5 +1,8 @@ +import L "lib"; +import D "canister:dependency"; + actor { - public func greet(name : Text) : async Text { + public shared func greet(name : Text) : async Text { return "Hello, " # name # "!"; }; }; diff --git a/e2e/assets/make_like/lib.mo b/e2e/assets/make_like/lib.mo new file mode 100644 index 0000000000..b1facbe3eb --- /dev/null +++ b/e2e/assets/make_like/lib.mo @@ -0,0 +1 @@ +module {} \ No newline at end of file diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 2e1e34c580..5abeea63f1 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ 
-17,17 +17,20 @@ teardown() { @test "trying to break dependency compiling" { dfx_start + assert_command dfx deploy -vv dependent assert_contains '"dependent.mo"' assert_contains '"dependency.mo"' - touch dependent.mo - assert_command dfx deploy -vv dependent - assert_contains '"dependent.mo"' - assert_not_contains '"dependency.mo"' + test "$(ls .dfx/local/canisters/idl)" != "" - touch dependency.mo - assert_command dfx deploy -vv dependent - assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' + # touch dependent.mo + # assert_command dfx deploy -vv dependent + # assert_contains '"dependent.mo"' + # assert_not_contains '"dependency.mo"' + + # # touch dependency.mo + # # assert_command dfx deploy -vv dependent + # # assert_contains '"dependent.mo"' + # # assert_contains '"dependency.mo"' } From 3b184cacd043ee8fd5ae51e3b7cc8de46263ab3e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 03:07:24 +0300 Subject: [PATCH 151/354] misc --- e2e/tests-dfx/make_like.bash | 9 ++++++--- src/dfx/src/lib/models/canister.rs | 1 + 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 5abeea63f1..ec18e4944a 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -18,9 +18,12 @@ teardown() { @test "trying to break dependency compiling" { dfx_start - assert_command dfx deploy -vv dependent - assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' + assert_command dfx canister create dependent + assert_command dfx canister create dependency + assert_command dfx build -vv dependency + assert_command dfx build -vv dependent + # assert_contains '"dependent.mo"' + # assert_contains '"dependency.mo"' test "$(ls .dfx/local/canisters/idl)" != "" diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 12dc13ee27..fa14a47aaa 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ 
-655,6 +655,7 @@ impl CanisterPool { #[context("Failed step_prebuild_all.")] fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig) -> DfxResult<()> { + println!("step_prebuild_all: {:?}", self.canisters.iter().map(|c| c.get_name()).collect::>()); // moc expects all .did files of dependencies to be in with name .did. // Because some canisters don't get built these .did files have to be copied over manually. for canister in self.canisters.iter().filter(|c| { From 108b233a436d8bcb1be060e84cd8d19e8f2e1bad Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 04:22:51 +0300 Subject: [PATCH 152/354] bug fix --- e2e/tests-dfx/make_like.bash | 31 ++++++++++------------- src/dfx/src/lib/models/canister.rs | 40 +++++++++++++++++++++--------- 2 files changed, 41 insertions(+), 30 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index ec18e4944a..7bc8db02fa 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -18,22 +18,17 @@ teardown() { @test "trying to break dependency compiling" { dfx_start - assert_command dfx canister create dependent - assert_command dfx canister create dependency - assert_command dfx build -vv dependency - assert_command dfx build -vv dependent - # assert_contains '"dependent.mo"' - # assert_contains '"dependency.mo"' - - test "$(ls .dfx/local/canisters/idl)" != "" - - # touch dependent.mo - # assert_command dfx deploy -vv dependent - # assert_contains '"dependent.mo"' - # assert_not_contains '"dependency.mo"' - - # # touch dependency.mo - # # assert_command dfx deploy -vv dependent - # # assert_contains '"dependent.mo"' - # # assert_contains '"dependency.mo"' + assert_command dfx deploy -vv dependent + assert_contains '"dependent.mo"' + assert_contains '"dependency.mo"' + + touch dependent.mo + assert_command dfx deploy -vv dependent + assert_contains '"dependent.mo"' + assert_not_contains '"dependency.mo"' + + touch dependency.mo + assert_command dfx deploy 
-vv dependent + assert_contains '"dependent.mo"' + assert_contains '"dependency.mo"' } diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index fa14a47aaa..fbf8a87d5a 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -653,18 +653,33 @@ impl CanisterPool { Ok(dest_graph) } + /// TODO: Duplicate entity domainL `canisters_to_build` and `build_config.canisters_to_build`. #[context("Failed step_prebuild_all.")] - fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig) -> DfxResult<()> { - println!("step_prebuild_all: {:?}", self.canisters.iter().map(|c| c.get_name()).collect::>()); + fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig, canisters_to_build: &Vec<&Arc>) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. // Because some canisters don't get built these .did files have to be copied over manually. - for canister in self.canisters.iter().filter(|c| { - build_config - .canisters_to_build - .as_ref() - .map(|cans| !cans.iter().contains(&c.get_name().to_string())) - .unwrap_or(false) - }) { + let iter = canisters_to_build.iter() + .map(|&canister| { + // TODO: Is `unwrap` on the next line legit? + let parent_node = *self.imports.borrow().nodes.get(&Import::Canister(canister.as_ref().get_name().to_owned())).unwrap(); + let imports = self.imports.borrow(); + let neighbors = imports.graph.neighbors(parent_node); + neighbors + .map(|id| imports.nodes.iter() + .find_map(move |(k, v)| if v == &id { Some(k.clone()) } else { None })) // TODO: slow + .filter_map(|import| + if let Some(Import::Canister(name)) = import { // TODO: The above produces a superfluous `Option<>`. + self.get_first_canister_with_name(&name) + } else { + None + } + ) + // .map(|x| (x, ())) + .collect::>() + }) + .flatten(); + // FIXME: The above may produce duplicate canisters. 
+ for canister in iter { let maybe_from = if let Some(remote_candid) = canister.info.get_remote_candid() { Some(remote_candid) } else { @@ -799,13 +814,14 @@ impl CanisterPool { log: &Logger, build_config: &BuildConfig, ) -> DfxResult>> { - self.step_prebuild_all(log, build_config) - .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; - let order = self.build_order(env, &build_config.canisters_to_build.clone())?; // TODO: Eliminate `clone`. // TODO: The next line is slow and confusing code. let canisters_to_build: Vec<&Arc> = self.canisters.iter().filter(|c| order.contains(&c.canister_id())).collect(); + + self.step_prebuild_all(log, build_config, &canisters_to_build) + .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; + let mut result = Vec::new(); for canister_id in &order { if let Some(canister) = self.get_canister(canister_id) { From 769f89c1d1000c65629d24a984d138e013a6418b Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 04:30:20 +0300 Subject: [PATCH 153/354] misc --- e2e/tests-dfx/make_like.bash | 40 +++++++++++++++++++++++++++++- src/dfx/src/lib/models/canister.rs | 1 + 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 7bc8db02fa..e3b6c47cc7 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -15,7 +15,7 @@ teardown() { standard_teardown } -@test "trying to break dependency compiling" { +@test "trying to break dependency compiling: deploy" { dfx_start assert_command dfx deploy -vv dependent @@ -31,4 +31,42 @@ teardown() { assert_command dfx deploy -vv dependent assert_contains '"dependent.mo"' assert_contains '"dependency.mo"' + + touch dependency.mo + assert_command dfx deploy -vv dependency + assert_not_contains '"dependent.mo"' + assert_contains '"dependency.mo"' + + assert_command dfx deploy -vv dependent + assert_contains '"dependent.mo"' + assert_not_contains 
'"dependency.mo"' +} + +@test "trying to break dependency compiling: build" { + dfx_start + + assert_command dfx canister create dependency + assert_command dfx canister create dependent + assert_command dfx build -vv dependent + assert_contains '"dependent.mo"' + assert_contains '"dependency.mo"' + + touch dependent.mo + assert_command dfx build -vv dependent + assert_contains '"dependent.mo"' + assert_not_contains '"dependency.mo"' + + touch dependency.mo + assert_command dfx build -vv dependent + assert_contains '"dependent.mo"' + assert_contains '"dependency.mo"' + + touch dependency.mo + assert_command dfx build -vv dependency + assert_not_contains '"dependent.mo"' + assert_contains '"dependency.mo"' + + assert_command dfx build -vv dependent + assert_contains '"dependent.mo"' + assert_not_contains '"dependency.mo"' } diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index fbf8a87d5a..a58bbcfe80 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -761,6 +761,7 @@ impl CanisterPool { canister.postbuild(self, build_config) } + // FIXME: Make cleanup reverse to the (updated) `step_prebuild_all`. 
fn step_postbuild_all( &self, build_config: &BuildConfig, From cd896763952f3c972041598a51abd911ec6da485 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 04:59:15 +0300 Subject: [PATCH 154/354] more tests --- e2e/assets/make_like/dependency.mo | 2 ++ e2e/tests-dfx/make_like.bash | 20 ++++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/e2e/assets/make_like/dependency.mo b/e2e/assets/make_like/dependency.mo index 9806b1fdec..8be304ae1d 100644 --- a/e2e/assets/make_like/dependency.mo +++ b/e2e/assets/make_like/dependency.mo @@ -1,3 +1,5 @@ +import L "lib"; + actor { public shared func greet(name: Text) : async Text { return "Hello, " # name # "!"; diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index e3b6c47cc7..d0652174f6 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -40,6 +40,16 @@ teardown() { assert_command dfx deploy -vv dependent assert_contains '"dependent.mo"' assert_not_contains '"dependency.mo"' + + touch lib.mo + assert_command dfx deploy -vv dependent + assert_contains '"dependent.mo"' + assert_contains '"dependency.mo"' + + touch lib.mo + assert_command dfx deploy -vv dependency + assert_contains '"dependency.mo"' + assert_not_contains '"dependent.mo"' } @test "trying to break dependency compiling: build" { @@ -69,4 +79,14 @@ teardown() { assert_command dfx build -vv dependent assert_contains '"dependent.mo"' assert_not_contains '"dependency.mo"' + + touch lib.mo + assert_command dfx build -vv dependent + assert_contains '"dependent.mo"' + assert_contains '"dependency.mo"' + + touch lib.mo + assert_command dfx build -vv dependency + assert_contains '"dependency.mo"' + assert_not_contains '"dependent.mo"' } From b2c7adef7178e42e64315c113fc97bff22577805 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 05:30:02 +0300 Subject: [PATCH 155/354] comment typo --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index a58bbcfe80..1124d272d4 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -653,7 +653,7 @@ impl CanisterPool { Ok(dest_graph) } - /// TODO: Duplicate entity domainL `canisters_to_build` and `build_config.canisters_to_build`. + /// TODO: Duplicate entity domain `canisters_to_build` and `build_config.canisters_to_build`. #[context("Failed step_prebuild_all.")] fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig, canisters_to_build: &Vec<&Arc>) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. From bf7c1c70bfbc9fbd2e24fbee1d7e6480b012caae Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 05:31:23 +0300 Subject: [PATCH 156/354] refactor --- src/dfx/src/lib/models/canister.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 1124d272d4..4208dcb0a6 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -665,10 +665,10 @@ impl CanisterPool { let imports = self.imports.borrow(); let neighbors = imports.graph.neighbors(parent_node); neighbors - .map(|id| imports.nodes.iter() + .filter_map(|id| imports.nodes.iter() .find_map(move |(k, v)| if v == &id { Some(k.clone()) } else { None })) // TODO: slow .filter_map(|import| - if let Some(Import::Canister(name)) = import { // TODO: The above produces a superfluous `Option<>`. 
+ if let Import::Canister(name) = import { self.get_first_canister_with_name(&name) } else { None From 280fdb75bbe3661c472549644895259c5ef0772e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 05:35:04 +0300 Subject: [PATCH 157/354] refactor --- src/dfx/src/lib/models/canister.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 4208dcb0a6..624d8b32a0 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -659,7 +659,7 @@ impl CanisterPool { // moc expects all .did files of dependencies to be in with name .did. // Because some canisters don't get built these .did files have to be copied over manually. let iter = canisters_to_build.iter() - .map(|&canister| { + .flat_map(|&canister| { // TODO: Is `unwrap` on the next line legit? let parent_node = *self.imports.borrow().nodes.get(&Import::Canister(canister.as_ref().get_name().to_owned())).unwrap(); let imports = self.imports.borrow(); @@ -676,8 +676,7 @@ impl CanisterPool { ) // .map(|x| (x, ())) .collect::>() - }) - .flatten(); + }); // FIXME: The above may produce duplicate canisters. 
for canister in iter { let maybe_from = if let Some(remote_candid) = canister.info.get_remote_candid() { From d4a4254aa4f452de49e051e3d7660561f2461662 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 05:35:44 +0300 Subject: [PATCH 158/354] refactor --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 79e98e101d..b32051f49a 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -81,7 +81,7 @@ pub async fn deploy_canisters( let required_canisters = config .get_config() - .get_canister_names_with_dependencies(some_canister.as_deref())?; + .get_canister_names_with_dependencies(some_canister)?; let canisters_to_load = add_canisters_with_ids(&required_canisters, env, &config); let canister_pool = CanisterPool::load(env, true, &canisters_to_load)?; From 4f9daea899a217d983f71f2c3c451ee22a1adef6 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 05:36:10 +0300 Subject: [PATCH 159/354] refactor --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index b32051f49a..22ee442e13 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -173,7 +173,7 @@ pub async fn deploy_canisters( let force_reinstall = matches!(deploy_mode, ForceReinstallSingleCanister(_)); install_canisters( env, - &canisters_to_install, + canisters_to_install, &initial_canister_id_store, &config, argument, From ac8864122b2510e3ac5d09945ab2f326009b7ece Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 06:26:09 +0300 Subject: [PATCH 
160/354] some Clippy fixes --- src/dfx/src/lib/builders/motoko.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 0b0ec0270f..2e23ac5f6f 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -57,7 +57,7 @@ pub fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut Imports } else { Import::Relative(file.to_path_buf()) }; - if let Some(_) = imports.nodes.get(&parent) { // The item is already in the graph. + if imports.nodes.get(&parent).is_some() { // The item is already in the graph. return Ok(()); } else { imports.nodes.insert(parent.clone(), imports.graph.add_node(parent.clone())); @@ -106,7 +106,7 @@ impl CanisterBuilder for MotokoBuilder { pool: &CanisterPool, info: &CanisterInfo, ) -> DfxResult> { - add_imports(self.cache.as_ref(), info, &mut *pool.imports.borrow_mut(), pool)?; + add_imports(self.cache.as_ref(), info, &mut pool.imports.borrow_mut(), pool)?; // TODO: In some reason, the following line is needed only for `deploy`, not for `build`. 
let graph = &pool.imports.borrow().graph; @@ -121,11 +121,8 @@ impl CanisterBuilder for MotokoBuilder { } Err(err) => { let message = match graph.node_weight(err.node_id()) { - Some(canister_id) => match canister_id { - Import::Canister(name) => &name, - _ => "", - }, - None => "", + Some(Import::Canister(name)) => &name, + _ => "", }; return Err(DfxError::new(BuildError::DependencyError(format!( "Found circular dependency: {}", From 6c68fc10027934b795d971427e89665d84edc190 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 06:32:18 +0300 Subject: [PATCH 161/354] clippy recommendations followed --- src/dfx/src/lib/builders/mod.rs | 14 +++----------- src/dfx/src/lib/builders/motoko.rs | 2 +- src/dfx/src/lib/graph/traverse_filtered.rs | 6 +++--- src/dfx/src/lib/models/canister.rs | 10 +++++----- 4 files changed, 12 insertions(+), 20 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index dacf6a7f38..ff0f2f5e50 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -243,7 +243,7 @@ pub trait CanisterBuilder { .collect(); if canister_info.is_motoko() { // hack - add_imports(cache, canister_info, &mut *pool.imports.borrow_mut(), pool)?; + add_imports(cache, canister_info, &mut pool.imports.borrow_mut(), pool)?; } // Check that one of the dependencies is newer than the target: @@ -267,11 +267,7 @@ pub trait CanisterBuilder { Import::Canister(canister_name) => { // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); - if let Some(main_file) = main_file { - Some(main_file.to_owned()) - } else { - None - } + main_file.map(|main_file| main_file.to_owned()) } else { None } @@ -279,11 +275,7 @@ pub trait CanisterBuilder { Import::Ic(canister_id) => { if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { if let Some(canister) = pool.get_first_canister_with_name(canister_name) { 
- if let Some(main_file) = canister.get_info().get_main_file() { - Some(main_file.to_owned()) - } else { - None - } + canister.get_info().get_main_file().map(|main_file| main_file.to_owned()) } else { None } diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 2e23ac5f6f..cca3c90a9c 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -121,7 +121,7 @@ impl CanisterBuilder for MotokoBuilder { } Err(err) => { let message = match graph.node_weight(err.node_id()) { - Some(Import::Canister(name)) => &name, + Some(Import::Canister(name)) => name, _ => "", }; return Err(DfxError::new(BuildError::DependencyError(format!( diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index caae7cf605..a07d059244 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -24,14 +24,14 @@ impl BfsFiltered { VM: VisitMap, { while let Some(source_child_id) = &self.base.next(graph) { - if (&mut predicate)(source_child_id)? { + if predicate(source_child_id)? { let mut source_parent_iter = graph.neighbors_directed(*source_child_id, Incoming); let mut source_parent_id; if let Some(id1) = source_parent_iter.next() { source_parent_id = id1; loop { - if (&mut predicate)(&source_parent_id)? { - (&mut call)(&source_parent_id, &source_child_id)?; + if predicate(&source_parent_id)? 
{ + call(&source_parent_id, source_child_id)?; break; } if let Some(id2) = source_parent_iter.next() { diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 624d8b32a0..600bf6a119 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -605,7 +605,7 @@ impl CanisterPool { panic!("programming error"); } }; - let parent_canister = self.get_first_canister_with_name(&parent_name).unwrap().canister_id(); + let parent_canister = self.get_first_canister_with_name(parent_name).unwrap().canister_id(); dest_id_set.entry(start_node).or_insert_with(|| dest_graph.add_node(parent_canister)); let bfs = Bfs::new(&source_graph, start_node); @@ -628,7 +628,7 @@ impl CanisterPool { panic!("programming error"); } }; - let parent_canister = self.get_first_canister_with_name(&parent_name).unwrap().canister_id(); + let parent_canister = self.get_first_canister_with_name(parent_name).unwrap().canister_id(); let child = source_graph.node_weight(source_child_id).unwrap(); let child_name = match child { @@ -637,7 +637,7 @@ impl CanisterPool { panic!("programming error"); } }; - let child_canister = self.get_first_canister_with_name(&child_name) + let child_canister = self.get_first_canister_with_name(child_name) .ok_or_else(|| anyhow!("A canister with the name '{}' was not found in the current project.", child_name.clone()))? .canister_id(); @@ -655,7 +655,7 @@ impl CanisterPool { /// TODO: Duplicate entity domain `canisters_to_build` and `build_config.canisters_to_build`. #[context("Failed step_prebuild_all.")] - fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig, canisters_to_build: &Vec<&Arc>) -> DfxResult<()> { + fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig, canisters_to_build: &[&Arc]) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. // Because some canisters don't get built these .did files have to be copied over manually. 
let iter = canisters_to_build.iter() @@ -819,7 +819,7 @@ impl CanisterPool { // TODO: The next line is slow and confusing code. let canisters_to_build: Vec<&Arc> = self.canisters.iter().filter(|c| order.contains(&c.canister_id())).collect(); - self.step_prebuild_all(log, build_config, &canisters_to_build) + self.step_prebuild_all(log, build_config, canisters_to_build.as_slice()) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; let mut result = Vec::new(); From 78625ea9cc79e2a2760f35b6d999326420afdc17 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 06:49:58 +0300 Subject: [PATCH 162/354] error noted --- e2e/tests-dfx/make_like.bash | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index d0652174f6..3a241ebd55 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -44,7 +44,7 @@ teardown() { touch lib.mo assert_command dfx deploy -vv dependent assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' + assert_contains '"dependency.mo"' # FIXME: This test fails randomly touch lib.mo assert_command dfx deploy -vv dependency @@ -83,7 +83,7 @@ teardown() { touch lib.mo assert_command dfx build -vv dependent assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' + assert_contains '"dependency.mo"' # FIXME: This test fails randomly touch lib.mo assert_command dfx build -vv dependency From b81331a61038cacee12610bc2405ca6fa8a2b2a2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 07:36:14 +0300 Subject: [PATCH 163/354] small test --- e2e/tests-dfx/make_like.bash | 96 ++++++++++++++++++------------------ 1 file changed, 49 insertions(+), 47 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 3a241ebd55..bc4f0b3761 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -15,78 +15,80 @@ teardown() { standard_teardown } 
-@test "trying to break dependency compiling: deploy" { - dfx_start +# DON'T UNCOMMENT the below, after the cause of random failures of the below toy test will be discovered. - assert_command dfx deploy -vv dependent - assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' +# @test "trying to break dependency compiling: deploy" { +# dfx_start - touch dependent.mo - assert_command dfx deploy -vv dependent - assert_contains '"dependent.mo"' - assert_not_contains '"dependency.mo"' +# assert_command dfx deploy -vv dependent +# assert_contains '"dependent.mo"' +# assert_contains '"dependency.mo"' - touch dependency.mo - assert_command dfx deploy -vv dependent - assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' +# touch dependent.mo +# assert_command dfx deploy -vv dependent +# assert_contains '"dependent.mo"' +# assert_not_contains '"dependency.mo"' - touch dependency.mo - assert_command dfx deploy -vv dependency - assert_not_contains '"dependent.mo"' - assert_contains '"dependency.mo"' +# touch dependency.mo +# assert_command dfx deploy -vv dependent +# assert_contains '"dependent.mo"' +# assert_contains '"dependency.mo"' - assert_command dfx deploy -vv dependent - assert_contains '"dependent.mo"' - assert_not_contains '"dependency.mo"' +# touch dependency.mo +# assert_command dfx deploy -vv dependency +# assert_not_contains '"dependent.mo"' +# assert_contains '"dependency.mo"' - touch lib.mo - assert_command dfx deploy -vv dependent - assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' # FIXME: This test fails randomly +# assert_command dfx deploy -vv dependent +# assert_contains '"dependent.mo"' +# assert_not_contains '"dependency.mo"' - touch lib.mo - assert_command dfx deploy -vv dependency - assert_contains '"dependency.mo"' - assert_not_contains '"dependent.mo"' -} +# touch lib.mo +# assert_command dfx deploy -vv dependent +# assert_contains '"dependent.mo"' +# assert_contains '"dependency.mo"' # FIXME: This test 
fails randomly + +# touch lib.mo +# assert_command dfx deploy -vv dependency +# assert_contains '"dependency.mo"' +# assert_not_contains '"dependent.mo"' +# } @test "trying to break dependency compiling: build" { dfx_start assert_command dfx canister create dependency assert_command dfx canister create dependent - assert_command dfx build -vv dependent - assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' + # assert_command dfx build -vv dependent + # assert_contains '"dependent.mo"' + # assert_contains '"dependency.mo"' - touch dependent.mo - assert_command dfx build -vv dependent - assert_contains '"dependent.mo"' - assert_not_contains '"dependency.mo"' + # touch dependent.mo + # assert_command dfx build -vv dependent + # assert_contains '"dependent.mo"' + # assert_not_contains '"dependency.mo"' - touch dependency.mo - assert_command dfx build -vv dependent - assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' + # touch dependency.mo + # assert_command dfx build -vv dependent + # assert_contains '"dependent.mo"' + # assert_contains '"dependency.mo"' touch dependency.mo assert_command dfx build -vv dependency assert_not_contains '"dependent.mo"' assert_contains '"dependency.mo"' - assert_command dfx build -vv dependent - assert_contains '"dependent.mo"' - assert_not_contains '"dependency.mo"' + # assert_command dfx build -vv dependent + # assert_contains '"dependent.mo"' + # assert_not_contains '"dependency.mo"' touch lib.mo assert_command dfx build -vv dependent assert_contains '"dependent.mo"' assert_contains '"dependency.mo"' # FIXME: This test fails randomly - touch lib.mo - assert_command dfx build -vv dependency - assert_contains '"dependency.mo"' - assert_not_contains '"dependent.mo"' + # touch lib.mo + # assert_command dfx build -vv dependency + # assert_contains '"dependency.mo"' + # assert_not_contains '"dependent.mo"' } From 32ae10af5c78cec59827065c095602e3b7d63980 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 
24 Apr 2024 17:32:15 +0300 Subject: [PATCH 164/354] comment changed --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 600bf6a119..cd53560cd0 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -657,7 +657,7 @@ impl CanisterPool { #[context("Failed step_prebuild_all.")] fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig, canisters_to_build: &[&Arc]) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. - // Because some canisters don't get built these .did files have to be copied over manually. + // Copy .did files into this temporary directory. let iter = canisters_to_build.iter() .flat_map(|&canister| { // TODO: Is `unwrap` on the next line legit? From 98c5251c652555f259fe85d034b24e87aed56191 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 20:18:04 +0300 Subject: [PATCH 165/354] bug fix --- e2e/tests-dfx/make_like.bash | 56 +++++++++++++++--------------- src/dfx/src/lib/builders/motoko.rs | 2 +- 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index bc4f0b3761..1ff960bdcc 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -21,37 +21,37 @@ teardown() { # dfx_start # assert_command dfx deploy -vv dependent -# assert_contains '"dependent.mo"' -# assert_contains '"dependency.mo"' +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' # touch dependent.mo # assert_command dfx deploy -vv dependent -# assert_contains '"dependent.mo"' -# assert_not_contains '"dependency.mo"' +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_not_contains '"moc-wrapper" "dependency.mo"' # touch dependency.mo # assert_command dfx deploy -vv dependent -# assert_contains '"dependent.mo"' -# 
assert_contains '"dependency.mo"' +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' # touch dependency.mo # assert_command dfx deploy -vv dependency -# assert_not_contains '"dependent.mo"' -# assert_contains '"dependency.mo"' +# assert_not_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' # assert_command dfx deploy -vv dependent -# assert_contains '"dependent.mo"' -# assert_not_contains '"dependency.mo"' +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_not_contains '"moc-wrapper" "dependency.mo"' # touch lib.mo # assert_command dfx deploy -vv dependent -# assert_contains '"dependent.mo"' -# assert_contains '"dependency.mo"' # FIXME: This test fails randomly +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' # FIXME: This test fails randomly # touch lib.mo # assert_command dfx deploy -vv dependency -# assert_contains '"dependency.mo"' -# assert_not_contains '"dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' +# assert_not_contains '"moc-wrapper" "dependent.mo"' # } @test "trying to break dependency compiling: build" { @@ -60,35 +60,35 @@ teardown() { assert_command dfx canister create dependency assert_command dfx canister create dependent # assert_command dfx build -vv dependent - # assert_contains '"dependent.mo"' - # assert_contains '"dependency.mo"' + # assert_contains '"moc-wrapper" "dependent.mo"' + # assert_contains '"moc-wrapper" "dependency.mo"' # touch dependent.mo # assert_command dfx build -vv dependent - # assert_contains '"dependent.mo"' - # assert_not_contains '"dependency.mo"' + # assert_contains '"moc-wrapper" "dependent.mo"' + # assert_not_contains '"moc-wrapper" "dependency.mo"' # touch dependency.mo # assert_command dfx build -vv dependent - # assert_contains '"dependent.mo"' - # assert_contains '"dependency.mo"' + # assert_contains '"moc-wrapper" "dependent.mo"' + # 
assert_contains '"moc-wrapper" "dependency.mo"' touch dependency.mo assert_command dfx build -vv dependency - assert_not_contains '"dependent.mo"' - assert_contains '"dependency.mo"' + assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' # assert_command dfx build -vv dependent - # assert_contains '"dependent.mo"' - # assert_not_contains '"dependency.mo"' + # assert_contains '"moc-wrapper" "dependent.mo"' + # assert_not_contains '"moc-wrapper" "dependency.mo"' touch lib.mo assert_command dfx build -vv dependent - assert_contains '"dependent.mo"' - assert_contains '"dependency.mo"' # FIXME: This test fails randomly + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' # FIXME: This test fails randomly # touch lib.mo # assert_command dfx build -vv dependency - # assert_contains '"dependency.mo"' - # assert_not_contains '"dependent.mo"' + # assert_contains '"moc-wrapper" "dependency.mo"' + # assert_not_contains '"moc-wrapper" "dependent.mo"' } diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index cca3c90a9c..86e3cb039e 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -80,7 +80,7 @@ pub fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut Imports if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { - add_imports_recursive(cache, Path::new(main_file), imports, pool, None)?; + add_imports_recursive(cache, Path::new(main_file), imports, pool, Some(canister.get_info()))?; } } } From 656715c83984406d4978945ad11e78b7ca8c9b7d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 20:40:17 +0300 Subject: [PATCH 166/354] tests passed --- e2e/tests-dfx/make_like.bash | 96 ++++++++++++++++++------------------ 1 file changed, 47 insertions(+), 49 deletions(-) diff 
--git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 1ff960bdcc..2eaade214f 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -15,80 +15,78 @@ teardown() { standard_teardown } -# DON'T UNCOMMENT the below, after the cause of random failures of the below toy test will be discovered. - -# @test "trying to break dependency compiling: deploy" { -# dfx_start +@test "trying to break dependency compiling: deploy" { + dfx_start -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' -# touch dependent.mo -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_not_contains '"moc-wrapper" "dependency.mo"' + touch dependent.mo + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' -# touch dependency.mo -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' + touch dependency.mo + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' -# touch dependency.mo -# assert_command dfx deploy -vv dependency -# assert_not_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' + touch dependency.mo + assert_command dfx deploy -vv dependency + assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_not_contains '"moc-wrapper" "dependency.mo"' + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" 
"dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' -# touch lib.mo -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' # FIXME: This test fails randomly + touch lib.mo + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' # FIXME: This test fails randomly -# touch lib.mo -# assert_command dfx deploy -vv dependency -# assert_contains '"moc-wrapper" "dependency.mo"' -# assert_not_contains '"moc-wrapper" "dependent.mo"' -# } + touch lib.mo + assert_command dfx deploy -vv dependency + assert_contains '"moc-wrapper" "dependency.mo"' + assert_not_contains '"moc-wrapper" "dependent.mo"' +} @test "trying to break dependency compiling: build" { dfx_start assert_command dfx canister create dependency assert_command dfx canister create dependent - # assert_command dfx build -vv dependent - # assert_contains '"moc-wrapper" "dependent.mo"' - # assert_contains '"moc-wrapper" "dependency.mo"' + assert_command dfx build -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' - # touch dependent.mo - # assert_command dfx build -vv dependent - # assert_contains '"moc-wrapper" "dependent.mo"' - # assert_not_contains '"moc-wrapper" "dependency.mo"' + touch dependent.mo + assert_command dfx build -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' - # touch dependency.mo - # assert_command dfx build -vv dependent - # assert_contains '"moc-wrapper" "dependent.mo"' - # assert_contains '"moc-wrapper" "dependency.mo"' + touch dependency.mo + assert_command dfx build -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' touch dependency.mo assert_command dfx build -vv dependency assert_not_contains '"moc-wrapper" 
"dependent.mo"' assert_contains '"moc-wrapper" "dependency.mo"' - # assert_command dfx build -vv dependent - # assert_contains '"moc-wrapper" "dependent.mo"' - # assert_not_contains '"moc-wrapper" "dependency.mo"' + assert_command dfx build -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' touch lib.mo assert_command dfx build -vv dependent assert_contains '"moc-wrapper" "dependent.mo"' assert_contains '"moc-wrapper" "dependency.mo"' # FIXME: This test fails randomly - # touch lib.mo - # assert_command dfx build -vv dependency - # assert_contains '"moc-wrapper" "dependency.mo"' - # assert_not_contains '"moc-wrapper" "dependent.mo"' + touch lib.mo + assert_command dfx build -vv dependency + assert_contains '"moc-wrapper" "dependency.mo"' + assert_not_contains '"moc-wrapper" "dependent.mo"' } From 746a2fe66dfdeadbf6dc46b9a7c8fb2489a3542f Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 20:57:02 +0300 Subject: [PATCH 167/354] removed outdated FIXME comments --- e2e/tests-dfx/make_like.bash | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 2eaade214f..046adb2a79 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -44,7 +44,7 @@ teardown() { touch lib.mo assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' # FIXME: This test fails randomly + assert_contains '"moc-wrapper" "dependency.mo"' touch lib.mo assert_command dfx deploy -vv dependency @@ -83,7 +83,7 @@ teardown() { touch lib.mo assert_command dfx build -vv dependent assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' # FIXME: This test fails randomly + assert_contains '"moc-wrapper" "dependency.mo"' touch lib.mo assert_command dfx build -vv dependency From 
8b3ce1744cd1367db5865dbdb868bca6e8ce02a4 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 21:05:41 +0300 Subject: [PATCH 168/354] more logging --- src/dfx/src/lib/builders/mod.rs | 6 ++++-- src/dfx/src/lib/models/canister.rs | 2 +- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index ff0f2f5e50..36cb225b85 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -4,6 +4,7 @@ use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::models::canister::CanisterPool; use crate::lib::models::canister::Import; +use slog::trace; use anyhow::{bail, Context}; use candid::Principal as CanisterId; use candid_parser::utils::CandidSource; @@ -14,6 +15,7 @@ use dfx_core::util; use fn_error_context::context; use handlebars::Handlebars; use petgraph::visit::Bfs; +use slog::Logger; use std::borrow::Cow; use std::collections::BTreeMap; use std::ffi::OsStr; @@ -231,6 +233,7 @@ pub trait CanisterBuilder { pool: &CanisterPool, canister_info: &CanisterInfo, cache: &dyn Cache, + logger: &Logger, ) -> DfxResult { // let motoko_info = canister_info.as_info::()?; let output_wasm_path = canister_info.get_output_wasm_path(); @@ -321,8 +324,7 @@ pub trait CanisterBuilder { }; }; } else { - // FIXME: Uncomment: - // trace!(self.logger, "Canister {} already compiled.", canister_info.get_name()); + trace!(logger, "Canister {} already compiled.", canister_info.get_name()); return Ok(false); } } diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index cd53560cd0..6886450f33 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -835,7 +835,7 @@ impl CanisterPool { trace!(log, "Not building canister '{}'.", canister.get_name()); continue; } - if !canister.builder.should_build(self, 
&canister.info, env.get_cache().as_ref())? { + if !canister.builder.should_build(self, &canister.info, env.get_cache().as_ref(), env.get_logger())? { continue; } diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 22ee442e13..1bcef3e398 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -121,7 +121,7 @@ pub async fn deploy_canisters( true, |canister_config| canister_config.deploy))) .filter(|canister_name| if let Some(canister) = canister_pool.get_first_canister_with_name(canister_name) { - canister.builder.should_build(&canister_pool, &canister.info, env.get_cache().as_ref()).unwrap() // FIXME: `unwrap()` + canister.builder.should_build(&canister_pool, &canister.info, env.get_cache().as_ref(), env.get_logger()).unwrap() // FIXME: `unwrap()` } else { false } From a7096f24504f507b220686c61f29cfc81ed3b5de Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:09:05 +0300 Subject: [PATCH 169/354] removed FIXME code --- src/dfx/src/lib/builders/mod.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 36cb225b85..5324d54582 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -256,11 +256,7 @@ pub trait CanisterBuilder { let start = if let Some(node_index) = imports.nodes.get(&Import::Canister(canister_info.get_name().to_string())) { *node_index } else { - panic!("programming error"); // FIXME: correct? 
- // let node = Import::Relative(canister_info.get_main_path().to_path_buf()); - // let node_index = imports.graph.add_node(node.clone()); - // imports.nodes.insert(node, node_index); - // node_index + panic!("programming error"); }; let mut import_iter = Bfs::new(&imports.graph, start); loop { From b02b5980edaf8e331d119a30dcc004288e50d940 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:15:26 +0300 Subject: [PATCH 170/354] bug fix --- src/dfx/src/lib/models/canister.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 6886450f33..6887e4cbf2 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -669,15 +669,15 @@ impl CanisterPool { .find_map(move |(k, v)| if v == &id { Some(k.clone()) } else { None })) // TODO: slow .filter_map(|import| if let Import::Canister(name) = import { - self.get_first_canister_with_name(&name) + self.get_first_canister_with_name(&name).map(|canister| (name, canister)) } else { None } ) - // .map(|x| (x, ())) .collect::>() - }); - // FIXME: The above may produce duplicate canisters. 
+ }) + .collect::>(); // eliminate duplicates + let iter = iter.values(); for canister in iter { let maybe_from = if let Some(remote_candid) = canister.info.get_remote_candid() { Some(remote_candid) From 476cb0827785b2a9ba0b1a57bc75ae737fde7a54 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:21:51 +0300 Subject: [PATCH 171/354] refactor --- src/dfx/src/lib/models/canister.rs | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 6887e4cbf2..4f781b5304 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -653,11 +653,7 @@ impl CanisterPool { Ok(dest_graph) } - /// TODO: Duplicate entity domain `canisters_to_build` and `build_config.canisters_to_build`. - #[context("Failed step_prebuild_all.")] - fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig, canisters_to_build: &[&Arc]) -> DfxResult<()> { - // moc expects all .did files of dependencies to be in with name .did. - // Copy .did files into this temporary directory. + fn canister_dependencies(&self, canisters_to_build: &[&Arc]) -> Vec> { let iter = canisters_to_build.iter() .flat_map(|&canister| { // TODO: Is `unwrap` on the next line legit? @@ -677,8 +673,15 @@ impl CanisterPool { .collect::>() }) .collect::>(); // eliminate duplicates - let iter = iter.values(); - for canister in iter { + iter.values().map(|p| p.clone()).collect() + } + + /// TODO: Duplicate entity domain `canisters_to_build` and `build_config.canisters_to_build`. + #[context("Failed step_prebuild_all.")] + fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig, canisters_to_build: &[&Arc]) -> DfxResult<()> { + // moc expects all .did files of dependencies to be in with name .did. + // Copy .did files into this temporary directory. 
+ for canister in self.canister_dependencies(canisters_to_build) { let maybe_from = if let Some(remote_candid) = canister.info.get_remote_candid() { Some(remote_candid) } else { From ab14e4007f8a8c1f81ba5f169c3369f5dfc782f0 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:25:03 +0300 Subject: [PATCH 172/354] bug fix --- src/dfx/src/lib/models/canister.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 4f781b5304..a6389515d0 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -763,15 +763,15 @@ impl CanisterPool { canister.postbuild(self, build_config) } - // FIXME: Make cleanup reverse to the (updated) `step_prebuild_all`. fn step_postbuild_all( &self, build_config: &BuildConfig, _order: &[CanisterId], + canisters_to_build: &[&Arc], ) -> DfxResult<()> { // We don't want to simply remove the whole directory, as in the future, // we may want to keep the IDL files downloaded from network. 
- for canister in self.canisters_to_build(build_config) { + for canister in self.canister_dependencies(canisters_to_build) { let idl_root = &build_config.idl_root; let canister_id = canister.canister_id(); let idl_file_path = idl_root.join(canister_id.to_text()).with_extension("did"); @@ -875,7 +875,7 @@ impl CanisterPool { } } - self.step_postbuild_all(build_config, &order) + self.step_postbuild_all(build_config, &order, canisters_to_build.as_slice()) .map_err(|e| DfxError::new(BuildError::PostBuildAllStepFailed(Box::new(e))))?; Ok(result) From 7025c461d8c7411f053e1e19a6c591c0c920a54e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:28:31 +0300 Subject: [PATCH 173/354] FIXME -> TODO --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 1bcef3e398..1a9d43becf 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -105,7 +105,7 @@ pub async fn deploy_canisters( .collect(), }; - // FIXME: `build_order` is called two times during deployment of a new canister. + // TODO: `build_order` is called two times during deployment of a new canister. let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // FIXME: `Some` here is a hack. // TODO: Eliminate `clone`. let order_names: Vec = order.iter() .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); // FIXME: Is `unwrap` here correct? 
From 4309de17adcac257a091c6a8c4f53d0edd5dc201 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:30:04 +0300 Subject: [PATCH 174/354] solving FIXMEs --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 1a9d43becf..23c9892c2b 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -106,9 +106,9 @@ pub async fn deploy_canisters( }; // TODO: `build_order` is called two times during deployment of a new canister. - let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // FIXME: `Some` here is a hack. // TODO: Eliminate `clone`. + let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. let order_names: Vec = order.iter() - .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); // FIXME: Is `unwrap` here correct? 
+ .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); let canisters_to_install: &Vec = &canisters_to_build .clone() From 2ef1d8fbe7ccf26d7733ee82614e8fefbc46e84f Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:32:52 +0300 Subject: [PATCH 175/354] removed outdated TODO --- src/dfx/src/lib/builders/motoko.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 86e3cb039e..b4e56edaf7 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -132,8 +132,7 @@ impl CanisterBuilder for MotokoBuilder { } } - /// TODO: It supports Make-like dependencies for build, but not for "Post processing candid file"/"Shrinking WASM"/"Attaching metadata" - /// Ideally, should make inter-canister dependencies to rely on `.did` file changed or not. + /// TODO: Ideally, should make inter-canister dependencies to rely on `.did` file changed or not. 
#[context("Failed to build Motoko canister '{}'.", canister_info.get_name())] fn build( &self, From a0e6e0caf9baa9c938591bf3100b97e6a4a6e8f2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:34:54 +0300 Subject: [PATCH 176/354] removed TODO --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 23c9892c2b..8c94a14eb2 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -157,7 +157,6 @@ pub async fn deploy_canisters( // let canisters_to_load = all_project_canisters_with_ids(env, &config); - // let pool = canister_pool; // TODO build_canisters( env, // &order_names, From d5c219faec76d0103988c71e31c8f392c1464f90 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:35:36 +0300 Subject: [PATCH 177/354] removed commented out line --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 8c94a14eb2..30c1e31684 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -155,8 +155,6 @@ pub async fn deploy_canisters( info!(env.get_logger(), "All canisters have already been created."); } - // let canisters_to_load = all_project_canisters_with_ids(env, &config); - build_canisters( env, // &order_names, From 3885c460a55acbbb3878759ce2c4132c50de7073 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Wed, 24 Apr 2024 23:38:44 +0300 Subject: [PATCH 178/354] followed clippy's recommendation --- src/dfx/src/lib/models/canister.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs 
b/src/dfx/src/lib/models/canister.rs index a6389515d0..923bc7afe0 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -673,7 +673,7 @@ impl CanisterPool { .collect::>() }) .collect::>(); // eliminate duplicates - iter.values().map(|p| p.clone()).collect() + iter.values().cloned().collect() } /// TODO: Duplicate entity domain `canisters_to_build` and `build_config.canisters_to_build`. From aaa07b08753bde7240a07ac6db9592f8be7dd359 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 00:36:11 +0300 Subject: [PATCH 179/354] bug fix --- src/dfx/src/lib/builders/mod.rs | 10 ++++++++++ src/dfx/src/lib/models/canister.rs | 9 ++++++++- .../src/lib/operations/canister/deploy_canisters.rs | 2 ++ 3 files changed, 20 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 5324d54582..b1a62c56b2 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -247,6 +247,16 @@ pub trait CanisterBuilder { if canister_info.is_motoko() { // hack add_imports(cache, canister_info, &mut pool.imports.borrow_mut(), pool)?; + } else { + let node = Import::Canister(canister_info.get_name().to_owned()); + let parent_id = *pool.imports.borrow_mut().nodes.entry(node.clone()) + .or_insert_with(|| pool.imports.borrow_mut().graph.add_node(node)); + for child in canister_info.get_dependencies() { + let child_node = Import::Canister(child.clone()); + let child_id = *pool.imports.borrow_mut().nodes.entry(child_node.clone()) + .or_insert_with(|| pool.imports.borrow_mut().graph.add_node(child_node)); + pool.imports.borrow_mut().graph.update_edge(parent_id, child_id, ()); + } } // Check that one of the dependencies is newer than the target: diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 923bc7afe0..9ec965a1c2 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -564,11 +564,13 @@ impl 
CanisterPool { &self, canisters_to_build: Option> ) -> DfxResult> { - // println!("canisters_to_build: {:?}", canisters_to_build); + println!("canisters_to_build: {:?}", canisters_to_build); // FIXME: Remove. let real_canisters_to_build: Vec<_> = match canisters_to_build { Some(ref canisters_to_build) => canisters_to_build.clone(), // TODO: Remove `clone()` None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), }; + println!("real_canisters_to_build: {:?}", real_canisters_to_build); // FIXME: Remove. + println!("self.canisters: {:?}", self.canisters.iter().map(|c| c.get_name()).collect::>()); // FIXME: Remove. for canister in &self.canisters { // a little inefficient let contains = if let Some(canisters_to_build) = &canisters_to_build { @@ -587,11 +589,14 @@ impl CanisterPool { let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = real_canisters_to_build.iter().map(|name| Import::Canister(name.clone())).collect(); + println!("start1: {:?}", start); // FIXME: Remove. let start: Vec<_> = start.into_iter().filter_map(|node| if let Some(&id) = source_ids.get(&node) { Some(id) } else { None }).collect(); + println!("source_ids: {:?}", source_ids); // FIXME: Remove. + println!("start2: {:?}", start); // FIXME: Remove. // Transform the graph of file dependencies to graph of canister dependencies. // For this do DFS for each of `real_canisters_to_build`. let mut dest_graph: DiGraph = DiGraph::new(); @@ -790,6 +795,7 @@ impl CanisterPool { ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); let graph = self.build_dependencies_graph(canisters_to_build.clone())?; // TODO: Can `clone` be eliminated? 
+ println!("YYY graph.node_count(): {}", graph.node_count()); let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { Some(canister_id) => match self.get_canister_info(canister_id) { @@ -800,6 +806,7 @@ impl CanisterPool { }; BuildError::DependencyError(format!("Found circular dependency: {}", message)) })?; + println!("YYY nodes.len(): {}", nodes.len()); Ok(nodes .iter() .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 30c1e31684..7912c40db5 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -107,8 +107,10 @@ pub async fn deploy_canisters( // TODO: `build_order` is called two times during deployment of a new canister. let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. + println!("XXX canisters_to_build: {:?}", canisters_to_build); // FIXME: Remove. let order_names: Vec = order.iter() .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); + println!("XXX order_names: {:?}", order_names); // FIXME: Remove. 
let canisters_to_install: &Vec = &canisters_to_build .clone() From f564c442b61c9817bcbdd1d2311f0378e8d58071 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 00:50:10 +0300 Subject: [PATCH 180/354] bug fix --- src/dfx/src/lib/builders/mod.rs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index b1a62c56b2..70a01e20fd 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -248,14 +248,15 @@ pub trait CanisterBuilder { if canister_info.is_motoko() { // hack add_imports(cache, canister_info, &mut pool.imports.borrow_mut(), pool)?; } else { + let imports = &mut *pool.imports.borrow_mut(); let node = Import::Canister(canister_info.get_name().to_owned()); - let parent_id = *pool.imports.borrow_mut().nodes.entry(node.clone()) - .or_insert_with(|| pool.imports.borrow_mut().graph.add_node(node)); + let parent_id = *imports.nodes.entry(node.clone()) + .or_insert_with(|| imports.graph.add_node(node)); for child in canister_info.get_dependencies() { let child_node = Import::Canister(child.clone()); - let child_id = *pool.imports.borrow_mut().nodes.entry(child_node.clone()) - .or_insert_with(|| pool.imports.borrow_mut().graph.add_node(child_node)); - pool.imports.borrow_mut().graph.update_edge(parent_id, child_id, ()); + let child_id = *imports.nodes.entry(child_node.clone()) + .or_insert_with(|| imports.graph.add_node(child_node)); + imports.graph.update_edge(parent_id, child_id, ()); } } From 6a98bb31defac037c9027da5688ccb8b05d973ea Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 01:25:17 +0300 Subject: [PATCH 181/354] bug fix --- src/dfx/src/lib/builders/mod.rs | 34 ++++++++++++++++++------------ src/dfx/src/lib/models/canister.rs | 14 ++++++------ 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 70a01e20fd..1bb60f8c06 100644 
--- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -228,6 +228,25 @@ pub trait CanisterBuilder { Ok(()) } + fn read_dependencies(&self, pool: &CanisterPool, canister_info: &CanisterInfo, cache: &dyn Cache) -> DfxResult { + if canister_info.is_motoko() { // hack + add_imports(cache, canister_info, &mut pool.imports.borrow_mut(), pool)?; + } else { + let imports = &mut *pool.imports.borrow_mut(); + println!("ADDING NON-MOTOKO node: {}", canister_info.get_name()); // FIXME + let node = Import::Canister(canister_info.get_name().to_owned()); + let parent_id = *imports.nodes.entry(node.clone()) + .or_insert_with(|| imports.graph.add_node(node)); + for child in canister_info.get_dependencies() { + let child_node = Import::Canister(child.clone()); + let child_id = *imports.nodes.entry(child_node.clone()) + .or_insert_with(|| imports.graph.add_node(child_node)); + imports.graph.update_edge(parent_id, child_id, ()); + } + } + Ok(()) + } + fn should_build( &self, pool: &CanisterPool, @@ -245,20 +264,7 @@ pub trait CanisterBuilder { .map(|&c| (c.canister_id().to_text(), c.get_name().to_string())) .collect(); - if canister_info.is_motoko() { // hack - add_imports(cache, canister_info, &mut pool.imports.borrow_mut(), pool)?; - } else { - let imports = &mut *pool.imports.borrow_mut(); - let node = Import::Canister(canister_info.get_name().to_owned()); - let parent_id = *imports.nodes.entry(node.clone()) - .or_insert_with(|| imports.graph.add_node(node)); - for child in canister_info.get_dependencies() { - let child_node = Import::Canister(child.clone()); - let child_id = *imports.nodes.entry(child_node.clone()) - .or_insert_with(|| imports.graph.add_node(child_node)); - imports.graph.update_edge(parent_id, child_id, ()); - } - } + self.read_dependencies(pool, canister_info, cache)?; // Check that one of the dependencies is newer than the target: if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { diff --git a/src/dfx/src/lib/models/canister.rs 
b/src/dfx/src/lib/models/canister.rs index 9ec965a1c2..16711ae9d7 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -13,6 +13,7 @@ use crate::util::assets; use anyhow::{anyhow, bail, Context}; use candid::Principal as CanisterId; use candid_parser::utils::CandidSource; +use dfx_core::config::cache::Cache; use dfx_core::config::model::canister_id_store::CanisterIdStore; use dfx_core::config::model::dfinity::{ CanisterMetadataSection, Config, MetadataVisibility, TechStack, WasmOptLevel, @@ -562,7 +563,8 @@ impl CanisterPool { #[context("Failed to build dependencies graph for canister pool.")] fn build_dependencies_graph( &self, - canisters_to_build: Option> + canisters_to_build: Option>, + cache: &dyn Cache, ) -> DfxResult> { println!("canisters_to_build: {:?}", canisters_to_build); // FIXME: Remove. let real_canisters_to_build: Vec<_> = match canisters_to_build { @@ -573,16 +575,14 @@ impl CanisterPool { println!("self.canisters: {:?}", self.canisters.iter().map(|c| c.get_name()).collect::>()); // FIXME: Remove. for canister in &self.canisters { // a little inefficient - let contains = if let Some(canisters_to_build) = &canisters_to_build { - canisters_to_build.iter().contains(&canister.get_info().get_name().to_string()) - } else { - true // because user specified to build all canisters - }; + let contains = real_canisters_to_build.iter().contains(&canister.get_info().get_name().to_string()); + println!("Contains: {}", contains); if contains { let canister_info = &canister.info; // TODO: Ignored return value is a hack. let _deps: Vec = canister.builder.get_dependencies(self, canister_info)?; } + canister.builder.read_dependencies(self, canister.get_info(), cache)?; // TODO: It is called multiple times during the flow. 
} let source_graph = &self.imports.borrow().graph; @@ -794,7 +794,7 @@ impl CanisterPool { canisters_to_build: &Option>, ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); - let graph = self.build_dependencies_graph(canisters_to_build.clone())?; // TODO: Can `clone` be eliminated? + let graph = self.build_dependencies_graph(canisters_to_build.clone(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? println!("YYY graph.node_count(): {}", graph.node_count()); let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { From f52f76a5276e9bd74c80dd0219c50dffac3ea340 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 01:34:59 +0300 Subject: [PATCH 182/354] removed tracing --- src/dfx/src/lib/builders/mod.rs | 1 - src/dfx/src/lib/models/canister.rs | 9 --------- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 -- 3 files changed, 12 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 1bb60f8c06..f297084094 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -233,7 +233,6 @@ pub trait CanisterBuilder { add_imports(cache, canister_info, &mut pool.imports.borrow_mut(), pool)?; } else { let imports = &mut *pool.imports.borrow_mut(); - println!("ADDING NON-MOTOKO node: {}", canister_info.get_name()); // FIXME let node = Import::Canister(canister_info.get_name().to_owned()); let parent_id = *imports.nodes.entry(node.clone()) .or_insert_with(|| imports.graph.add_node(node)); diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 16711ae9d7..275cbff853 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -566,17 +566,13 @@ impl CanisterPool { canisters_to_build: Option>, cache: &dyn Cache, ) -> DfxResult> { - println!("canisters_to_build: {:?}", canisters_to_build); // FIXME: Remove. 
let real_canisters_to_build: Vec<_> = match canisters_to_build { Some(ref canisters_to_build) => canisters_to_build.clone(), // TODO: Remove `clone()` None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), }; - println!("real_canisters_to_build: {:?}", real_canisters_to_build); // FIXME: Remove. - println!("self.canisters: {:?}", self.canisters.iter().map(|c| c.get_name()).collect::>()); // FIXME: Remove. for canister in &self.canisters { // a little inefficient let contains = real_canisters_to_build.iter().contains(&canister.get_info().get_name().to_string()); - println!("Contains: {}", contains); if contains { let canister_info = &canister.info; // TODO: Ignored return value is a hack. @@ -589,14 +585,11 @@ impl CanisterPool { let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = real_canisters_to_build.iter().map(|name| Import::Canister(name.clone())).collect(); - println!("start1: {:?}", start); // FIXME: Remove. let start: Vec<_> = start.into_iter().filter_map(|node| if let Some(&id) = source_ids.get(&node) { Some(id) } else { None }).collect(); - println!("source_ids: {:?}", source_ids); // FIXME: Remove. - println!("start2: {:?}", start); // FIXME: Remove. // Transform the graph of file dependencies to graph of canister dependencies. // For this do DFS for each of `real_canisters_to_build`. let mut dest_graph: DiGraph = DiGraph::new(); @@ -795,7 +788,6 @@ impl CanisterPool { ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); let graph = self.build_dependencies_graph(canisters_to_build.clone(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? 
- println!("YYY graph.node_count(): {}", graph.node_count()); let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { Some(canister_id) => match self.get_canister_info(canister_id) { @@ -806,7 +798,6 @@ impl CanisterPool { }; BuildError::DependencyError(format!("Found circular dependency: {}", message)) })?; - println!("YYY nodes.len(): {}", nodes.len()); Ok(nodes .iter() .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 7912c40db5..30c1e31684 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -107,10 +107,8 @@ pub async fn deploy_canisters( // TODO: `build_order` is called two times during deployment of a new canister. let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. - println!("XXX canisters_to_build: {:?}", canisters_to_build); // FIXME: Remove. let order_names: Vec = order.iter() .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); - println!("XXX order_names: {:?}", order_names); // FIXME: Remove. 
let canisters_to_install: &Vec = &canisters_to_build .clone() From afc0262c8e3c9819d2876964631c2e5751334e48 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 01:51:08 +0300 Subject: [PATCH 183/354] cargo fmt --- src/dfx-core/src/config/cache.rs | 2 +- src/dfx/src/lib/builders/mod.rs | 97 +++++++++------ src/dfx/src/lib/builders/motoko.rs | 97 +++++++++++---- src/dfx/src/lib/graph/mod.rs | 2 +- src/dfx/src/lib/graph/traverse_filtered.rs | 30 +++-- src/dfx/src/lib/models/canister.rs | 112 +++++++++++++----- .../operations/canister/deploy_canisters.rs | 41 +++++-- 7 files changed, 264 insertions(+), 117 deletions(-) diff --git a/src/dfx-core/src/config/cache.rs b/src/dfx-core/src/config/cache.rs index 22e0cef709..e75048ea66 100644 --- a/src/dfx-core/src/config/cache.rs +++ b/src/dfx-core/src/config/cache.rs @@ -4,8 +4,8 @@ use crate::error::cache::CacheError; #[cfg(not(windows))] use crate::foundation::get_user_home; use semver::Version; -use std::path::PathBuf; use std::fmt::Debug; +use std::path::PathBuf; pub trait Cache { fn version_str(&self) -> String; diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index f297084094..f52318de39 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -4,7 +4,6 @@ use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::models::canister::CanisterPool; use crate::lib::models::canister::Import; -use slog::trace; use anyhow::{bail, Context}; use candid::Principal as CanisterId; use candid_parser::utils::CandidSource; @@ -15,6 +14,7 @@ use dfx_core::util; use fn_error_context::context; use handlebars::Handlebars; use petgraph::visit::Bfs; +use slog::trace; use slog::Logger; use std::borrow::Cow; use std::collections::BTreeMap; @@ -228,17 +228,27 @@ pub trait CanisterBuilder { Ok(()) } - fn read_dependencies(&self, pool: &CanisterPool, canister_info: &CanisterInfo, cache: &dyn Cache) -> DfxResult { - 
if canister_info.is_motoko() { // hack + fn read_dependencies( + &self, + pool: &CanisterPool, + canister_info: &CanisterInfo, + cache: &dyn Cache, + ) -> DfxResult { + if canister_info.is_motoko() { + // hack add_imports(cache, canister_info, &mut pool.imports.borrow_mut(), pool)?; } else { let imports = &mut *pool.imports.borrow_mut(); let node = Import::Canister(canister_info.get_name().to_owned()); - let parent_id = *imports.nodes.entry(node.clone()) + let parent_id = *imports + .nodes + .entry(node.clone()) .or_insert_with(|| imports.graph.add_node(node)); for child in canister_info.get_dependencies() { let child_node = Import::Canister(child.clone()); - let child_id = *imports.nodes.entry(child_node.clone()) + let child_id = *imports + .nodes + .entry(child_node.clone()) .or_insert_with(|| imports.graph.add_node(child_node)); imports.graph.update_edge(parent_id, child_id, ()); } @@ -269,7 +279,10 @@ pub trait CanisterBuilder { if let Ok(wasm_file_metadata) = metadata(output_wasm_path) { let wasm_file_time = wasm_file_metadata.modified()?; let imports = pool.imports.borrow_mut(); - let start = if let Some(node_index) = imports.nodes.get(&Import::Canister(canister_info.get_name().to_string())) { + let start = if let Some(node_index) = imports + .nodes + .get(&Import::Canister(canister_info.get_name().to_string())) + { *node_index } else { panic!("programming error"); @@ -279,8 +292,11 @@ pub trait CanisterBuilder { if let Some(import) = import_iter.next(&imports.graph) { let subnode = &imports.graph[import]; let imported_file = match subnode { - Import::Canister(canister_name) => { // duplicate code - if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { + Import::Canister(canister_name) => { + // duplicate code + if let Some(canister) = + pool.get_first_canister_with_name(canister_name.as_str()) + { let main_file = canister.get_info().get_main_file(); main_file.map(|main_file| main_file.to_owned()) } else { @@ -289,8 +305,13 @@ 
pub trait CanisterBuilder { } Import::Ic(canister_id) => { if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { - if let Some(canister) = pool.get_first_canister_with_name(canister_name) { - canister.get_info().get_main_file().map(|main_file| main_file.to_owned()) + if let Some(canister) = + pool.get_first_canister_with_name(canister_name) + { + canister + .get_info() + .get_main_file() + .map(|main_file| main_file.to_owned()) } else { None } @@ -301,32 +322,30 @@ pub trait CanisterBuilder { Import::Lib(_path) => { // Skip libs, all changes by package managers don't modify existing directories but create new ones. continue; - // let i = path.find('/'); - // let pre_path = if let Some(i) = i { - // let expanded = Path::new( - // package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? - // ); - // expanded.join(&path[i+1..]) - // } else { - // Path::new(path.as_str()).to_owned() - // }; - // let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; - // let path2 = path2.to_string(); - // let path2 = Path::new(&path2); - // if path2.exists() { // TODO: Is it correct order of two variants? - // Some(Path::new(path2).to_owned()) - // } else { - // let path3 = pre_path.join(Path::new("lib.mo")); - // if path3.exists() { - // Some(path3.to_owned()) - // } else { - // bail!("source file has been deleted"); - // } - // } - } - Import::Relative(path) => { - Some(Path::new(&path).to_owned()) + // let i = path.find('/'); + // let pre_path = if let Some(i) = i { + // let expanded = Path::new( + // package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? + // ); + // expanded.join(&path[i+1..]) + // } else { + // Path::new(path.as_str()).to_owned() + // }; + // let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; + // let path2 = path2.to_string(); + // let path2 = Path::new(&path2); + // if path2.exists() { // TODO: Is it correct order of two variants? 
+ // Some(Path::new(path2).to_owned()) + // } else { + // let path3 = pre_path.join(Path::new("lib.mo")); + // if path3.exists() { + // Some(path3.to_owned()) + // } else { + // bail!("source file has been deleted"); + // } + // } } + Import::Relative(path) => Some(Path::new(&path).to_owned()), }; if let Some(imported_file) = imported_file { let imported_file_metadata = metadata(&imported_file)?; @@ -336,12 +355,16 @@ pub trait CanisterBuilder { }; }; } else { - trace!(logger, "Canister {} already compiled.", canister_info.get_name()); + trace!( + logger, + "Canister {} already compiled.", + canister_info.get_name() + ); return Ok(false); } } }; - + Ok(true) } diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index b4e56edaf7..b7ba6ccde6 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -6,7 +6,7 @@ use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; -use crate::lib::models::canister::{CanisterPool, ImportsTracker, Import}; +use crate::lib::models::canister::{CanisterPool, Import, ImportsTracker}; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; use anyhow::Context; @@ -42,10 +42,15 @@ impl MotokoBuilder { /// Add imports originating from canister `info` to the graph `imports` of dependencies. 
#[context("Failed to find imports for canister at '{}'.", info.as_info::().unwrap().get_main_path().display())] -pub fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut ImportsTracker, pool: &CanisterPool) -> DfxResult<()> { +pub fn add_imports( + cache: &dyn Cache, + info: &CanisterInfo, + imports: &mut ImportsTracker, + pool: &CanisterPool, +) -> DfxResult<()> { let motoko_info = info.as_info::()?; #[context("Failed recursive dependency detection at {}.", file.display())] - fn add_imports_recursive ( + fn add_imports_recursive( cache: &dyn Cache, file: &Path, imports: &mut ImportsTracker, @@ -57,10 +62,13 @@ pub fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut Imports } else { Import::Relative(file.to_path_buf()) }; - if imports.nodes.get(&parent).is_some() { // The item is already in the graph. + if imports.nodes.get(&parent).is_some() { + // The item is already in the graph. return Ok(()); } else { - imports.nodes.insert(parent.clone(), imports.graph.add_node(parent.clone())); + imports + .nodes + .insert(parent.clone(), imports.graph.add_node(parent.clone())); } let mut command = cache.get_binary_command("moc")?; @@ -76,25 +84,48 @@ pub fn add_imports(cache: &dyn Cache, info: &CanisterInfo, imports: &mut Imports Import::Relative(path) => { add_imports_recursive(cache, path.as_path(), imports, pool, None)?; } - Import::Canister(canister_name) => { // duplicate code - if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { + Import::Canister(canister_name) => { + // duplicate code + if let Some(canister) = + pool.get_first_canister_with_name(canister_name.as_str()) + { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { - add_imports_recursive(cache, Path::new(main_file), imports, pool, Some(canister.get_info()))?; + add_imports_recursive( + cache, + Path::new(main_file), + imports, + pool, + Some(canister.get_info()), + )?; } } } _ => {} } - let 
parent_node_index = *imports.nodes.entry(parent.clone()).or_insert_with(|| imports.graph.add_node(parent.clone())); - let child_node_index = *imports.nodes.entry(child.clone()).or_insert_with(|| imports.graph.add_node(child.clone())); - imports.graph.update_edge(parent_node_index, child_node_index, ()); + let parent_node_index = *imports + .nodes + .entry(parent.clone()) + .or_insert_with(|| imports.graph.add_node(parent.clone())); + let child_node_index = *imports + .nodes + .entry(child.clone()) + .or_insert_with(|| imports.graph.add_node(child.clone())); + imports + .graph + .update_edge(parent_node_index, child_node_index, ()); } Ok(()) } - add_imports_recursive(cache, motoko_info.get_main_path(), imports, pool, Some(info))?; + add_imports_recursive( + cache, + motoko_info.get_main_path(), + imports, + pool, + Some(info), + )?; Ok(()) } @@ -106,18 +137,27 @@ impl CanisterBuilder for MotokoBuilder { pool: &CanisterPool, info: &CanisterInfo, ) -> DfxResult> { - add_imports(self.cache.as_ref(), info, &mut pool.imports.borrow_mut(), pool)?; + add_imports( + self.cache.as_ref(), + info, + &mut pool.imports.borrow_mut(), + pool, + )?; // TODO: In some reason, the following line is needed only for `deploy`, not for `build`. 
let graph = &pool.imports.borrow().graph; match petgraph::algo::toposort(&pool.imports.borrow().graph, None) { Ok(order) => { - Ok(order.into_iter().filter_map(|id| match graph.node_weight(id) { - Some(Import::Canister(name)) => { - pool.get_first_canister_with_name(name.as_str()) // TODO: a little inefficient - } - _ => None, - }).map(|canister| canister.canister_id()).collect()) + Ok(order + .into_iter() + .filter_map(|id| match graph.node_weight(id) { + Some(Import::Canister(name)) => { + pool.get_first_canister_with_name(name.as_str()) // TODO: a little inefficient + } + _ => None, + }) + .map(|canister| canister.canister_id()) + .collect()) } Err(err) => { let message = match graph.node_weight(err.node_id()) { @@ -127,7 +167,7 @@ impl CanisterBuilder for MotokoBuilder { return Err(DfxError::new(BuildError::DependencyError(format!( "Found circular dependency: {}", message - )))); + )))); } } } @@ -163,7 +203,11 @@ impl CanisterBuilder for MotokoBuilder { .with_context(|| format!("Failed to create {}.", idl_dir_path.to_string_lossy()))?; // If the management canister is being imported, emit the candid file. 
- if pool.imports.borrow().nodes.contains_key(&Import::Ic("aaaaa-aa".to_string())) + if pool + .imports + .borrow() + .nodes + .contains_key(&Import::Ic("aaaaa-aa".to_string())) { let management_idl_path = idl_dir_path.join("aaaaa-aa.did"); dfx_core::fs::write(management_idl_path, management_idl()?)?; @@ -174,16 +218,18 @@ impl CanisterBuilder for MotokoBuilder { let package_arguments = package_arguments::load(cache.as_ref(), motoko_info.get_packtool())?; let mut package_arguments_map = BTreeMap::<&str, &str>::new(); - { // block + { + // block let mut i = 0; while i + 3 <= package_arguments.len() { if package_arguments[i] == "--package" { - package_arguments_map.insert(&package_arguments[i+1], &package_arguments[i+2]); + package_arguments_map + .insert(&package_arguments[i + 1], &package_arguments[i + 2]); i += 3; } else { i += 1; } - }; + } } let moc_arguments = match motoko_info.get_args() { @@ -222,7 +268,8 @@ impl CanisterBuilder for MotokoBuilder { }; motoko_compile(&self.logger, cache.as_ref(), ¶ms)?; - Ok(BuildOutput { // duplicate code + Ok(BuildOutput { + // duplicate code canister_id: canister_info .get_canister_id() .expect("Could not find canister ID."), diff --git a/src/dfx/src/lib/graph/mod.rs b/src/dfx/src/lib/graph/mod.rs index 6b49d4e2b3..f5d0d1a538 100644 --- a/src/dfx/src/lib/graph/mod.rs +++ b/src/dfx/src/lib/graph/mod.rs @@ -1 +1 @@ -pub mod traverse_filtered; \ No newline at end of file +pub mod traverse_filtered; diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index a07d059244..155dc71893 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,5 +1,9 @@ // TODO: Somebody, adopt this code (and DFS) to `petgraph`. 
-use petgraph::{data::DataMap, visit::{Bfs, IntoNeighborsDirected, VisitMap}, Direction::Incoming}; +use petgraph::{ + data::DataMap, + visit::{Bfs, IntoNeighborsDirected, VisitMap}, + Direction::Incoming, +}; use crate::lib::error::DfxResult; @@ -10,18 +14,22 @@ pub struct BfsFiltered { impl BfsFiltered { pub fn new(base: Bfs) -> Self { - Self { - base - } + Self { base } } /// TODO: Refactor: Extract `iter` function from here. - pub fn traverse2(&mut self, graph: G, mut predicate: P, mut call: C) -> DfxResult<()> - where C: FnMut(&NodeId, &NodeId) -> DfxResult<()>, - G: IntoNeighborsDirected + DataMap, - P: FnMut(&NodeId) -> DfxResult, - NodeId: Copy + Eq, - VM: VisitMap, + pub fn traverse2( + &mut self, + graph: G, + mut predicate: P, + mut call: C, + ) -> DfxResult<()> + where + C: FnMut(&NodeId, &NodeId) -> DfxResult<()>, + G: IntoNeighborsDirected + DataMap, + P: FnMut(&NodeId) -> DfxResult, + NodeId: Copy + Eq, + VM: VisitMap, { while let Some(source_child_id) = &self.base.next(graph) { if predicate(source_child_id)? 
{ @@ -45,4 +53,4 @@ impl BfsFiltered { } Ok(()) } -} \ No newline at end of file +} diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 275cbff853..1c144711b1 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -568,28 +568,45 @@ impl CanisterPool { ) -> DfxResult> { let real_canisters_to_build: Vec<_> = match canisters_to_build { Some(ref canisters_to_build) => canisters_to_build.clone(), // TODO: Remove `clone()` - None => self.canisters.iter().map(|canister| canister.get_name().to_string()).collect(), + None => self + .canisters + .iter() + .map(|canister| canister.get_name().to_string()) + .collect(), }; - for canister in &self.canisters { // a little inefficient - let contains = real_canisters_to_build.iter().contains(&canister.get_info().get_name().to_string()); + for canister in &self.canisters { + // a little inefficient + let contains = real_canisters_to_build + .iter() + .contains(&canister.get_info().get_name().to_string()); if contains { let canister_info = &canister.info; // TODO: Ignored return value is a hack. - let _deps: Vec = canister.builder.get_dependencies(self, canister_info)?; + let _deps: Vec = + canister.builder.get_dependencies(self, canister_info)?; } - canister.builder.read_dependencies(self, canister.get_info(), cache)?; // TODO: It is called multiple times during the flow. + canister + .builder + .read_dependencies(self, canister.get_info(), cache)?; // TODO: It is called multiple times during the flow. 
} let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; - let start: Vec<_> = - real_canisters_to_build.iter().map(|name| Import::Canister(name.clone())).collect(); - let start: Vec<_> = start.into_iter().filter_map(|node| if let Some(&id) = source_ids.get(&node) { - Some(id) - } else { - None - }).collect(); + let start: Vec<_> = real_canisters_to_build + .iter() + .map(|name| Import::Canister(name.clone())) + .collect(); + let start: Vec<_> = start + .into_iter() + .filter_map(|node| { + if let Some(&id) = source_ids.get(&node) { + Some(id) + } else { + None + } + }) + .collect(); // Transform the graph of file dependencies to graph of canister dependencies. // For this do DFS for each of `real_canisters_to_build`. let mut dest_graph: DiGraph = DiGraph::new(); @@ -603,8 +620,13 @@ impl CanisterPool { panic!("programming error"); } }; - let parent_canister = self.get_first_canister_with_name(parent_name).unwrap().canister_id(); - dest_id_set.entry(start_node).or_insert_with(|| dest_graph.add_node(parent_canister)); + let parent_canister = self + .get_first_canister_with_name(parent_name) + .unwrap() + .canister_id(); + dest_id_set + .entry(start_node) + .or_insert_with(|| dest_graph.add_node(parent_canister)); let bfs = Bfs::new(&source_graph, start_node); let mut filtered_bfs = BfsFiltered::new(bfs); @@ -647,27 +669,38 @@ impl CanisterPool { } )?; } - + Ok(dest_graph) } fn canister_dependencies(&self, canisters_to_build: &[&Arc]) -> Vec> { - let iter = canisters_to_build.iter() + let iter = canisters_to_build + .iter() .flat_map(|&canister| { // TODO: Is `unwrap` on the next line legit? 
- let parent_node = *self.imports.borrow().nodes.get(&Import::Canister(canister.as_ref().get_name().to_owned())).unwrap(); + let parent_node = *self + .imports + .borrow() + .nodes + .get(&Import::Canister(canister.as_ref().get_name().to_owned())) + .unwrap(); let imports = self.imports.borrow(); let neighbors = imports.graph.neighbors(parent_node); neighbors - .filter_map(|id| imports.nodes.iter() - .find_map(move |(k, v)| if v == &id { Some(k.clone()) } else { None })) // TODO: slow - .filter_map(|import| + .filter_map(|id| { + imports + .nodes + .iter() + .find_map(move |(k, v)| if v == &id { Some(k.clone()) } else { None }) + }) // TODO: slow + .filter_map(|import| { if let Import::Canister(name) = import { - self.get_first_canister_with_name(&name).map(|canister| (name, canister)) + self.get_first_canister_with_name(&name) + .map(|canister| (name, canister)) } else { None } - ) + }) .collect::>() }) .collect::>(); // eliminate duplicates @@ -676,7 +709,12 @@ impl CanisterPool { /// TODO: Duplicate entity domain `canisters_to_build` and `build_config.canisters_to_build`. #[context("Failed step_prebuild_all.")] - fn step_prebuild_all(&self, log: &Logger, build_config: &BuildConfig, canisters_to_build: &[&Arc]) -> DfxResult<()> { + fn step_prebuild_all( + &self, + log: &Logger, + build_config: &BuildConfig, + canisters_to_build: &[&Arc], + ) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. // Copy .did files into this temporary directory. for canister in self.canister_dependencies(canisters_to_build) { @@ -787,7 +825,8 @@ impl CanisterPool { canisters_to_build: &Option>, ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); - let graph = self.build_dependencies_graph(canisters_to_build.clone(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? + let graph = + self.build_dependencies_graph(canisters_to_build.clone(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? 
let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { Some(canister_id) => match self.get_canister_info(canister_id) { @@ -818,7 +857,11 @@ impl CanisterPool { let order = self.build_order(env, &build_config.canisters_to_build.clone())?; // TODO: Eliminate `clone`. // TODO: The next line is slow and confusing code. - let canisters_to_build: Vec<&Arc> = self.canisters.iter().filter(|c| order.contains(&c.canister_id())).collect(); + let canisters_to_build: Vec<&Arc> = self + .canisters + .iter() + .filter(|c| order.contains(&c.canister_id())) + .collect(); self.step_prebuild_all(log, build_config, canisters_to_build.as_slice()) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; @@ -829,17 +872,23 @@ impl CanisterPool { if canisters_to_build .iter() .map(|c| c.get_name()) - .contains(&canister.get_name()) // TODO: slow + .contains(&canister.get_name()) + // TODO: slow { trace!(log, "Building canister '{}'.", canister.get_name()); } else { trace!(log, "Not building canister '{}'.", canister.get_name()); continue; } - if !canister.builder.should_build(self, &canister.info, env.get_cache().as_ref(), env.get_logger())? { + if !canister.builder.should_build( + self, + &canister.info, + env.get_cache().as_ref(), + env.get_logger(), + )? { continue; } - + result.push( self.step_prebuild(build_config, canister) .map_err(|e| { @@ -884,7 +933,12 @@ impl CanisterPool { /// /// TODO: `log` can be got from `env`, can't it? 
#[context("Failed while trying to build all canisters.")] - pub async fn build_or_fail(&self, env: &dyn Environment, log: &Logger, build_config: &BuildConfig) -> DfxResult<()> { + pub async fn build_or_fail( + &self, + env: &dyn Environment, + log: &Logger, + build_config: &BuildConfig, + ) -> DfxResult<()> { self.download(build_config).await?; let outputs = self.build(env, log, build_config)?; diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 30c1e31684..ef9566d430 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -10,9 +10,7 @@ use crate::lib::operations::canister::deploy_canisters::DeployMode::{ ComputeEvidence, ForceReinstallSingleCanister, NormalDeploy, PrepareForProposal, }; use crate::lib::operations::canister::motoko_playground::reserve_canister_with_playground; -use crate::lib::operations::canister::{ - create_canister, install_canister::install_canister, -}; +use crate::lib::operations::canister::{create_canister, install_canister::install_canister}; use crate::util::clap::subnet_selection_opt::SubnetSelectionType; use anyhow::{anyhow, bail, Context}; use candid::Principal; @@ -105,27 +103,44 @@ pub async fn deploy_canisters( .collect(), }; - // TODO: `build_order` is called two times during deployment of a new canister. + // TODO: `build_order` is called two times during deployment of a new canister. let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. 
- let order_names: Vec = order.iter() - .map(|canister| canister_pool.get_canister(canister).unwrap().get_name().to_owned()).collect(); + let order_names: Vec = order + .iter() + .map(|canister| { + canister_pool + .get_canister(canister) + .unwrap() + .get_name() + .to_owned() + }) + .collect(); let canisters_to_install: &Vec = &canisters_to_build .clone() .into_iter() - .filter(|canister_name| - !pull_canisters_in_config.contains_key(canister_name) && - (some_canister == Some(canister_name) || // do deploy a canister that was explicitly specified + .filter(|canister_name| { + !pull_canisters_in_config.contains_key(canister_name) + && (some_canister == Some(canister_name) || // do deploy a canister that was explicitly specified // TODO: This is a hack. config.get_config().get_canister_config(canister_name).map_or( - true, |canister_config| canister_config.deploy))) - .filter(|canister_name| + true, |canister_config| canister_config.deploy)) + }) + .filter(|canister_name| { if let Some(canister) = canister_pool.get_first_canister_with_name(canister_name) { - canister.builder.should_build(&canister_pool, &canister.info, env.get_cache().as_ref(), env.get_logger()).unwrap() // FIXME: `unwrap()` + canister + .builder + .should_build( + &canister_pool, + &canister.info, + env.get_cache().as_ref(), + env.get_logger(), + ) + .unwrap() // FIXME: `unwrap()` } else { false } - ) + }) .collect(); if some_canister.is_some() { From 6307b5dc35d856c9c2e420f7a8b0a7ec8f80335b Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 12:25:55 +0300 Subject: [PATCH 184/354] fixing a bug --- .../operations/canister/deploy_canisters.rs | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index ef9566d430..de3dd40184 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ 
b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -13,6 +13,7 @@ use crate::lib::operations::canister::motoko_playground::reserve_canister_with_p use crate::lib::operations::canister::{create_canister, install_canister::install_canister}; use crate::util::clap::subnet_selection_opt::SubnetSelectionType; use anyhow::{anyhow, bail, Context}; +use itertools::Itertools; use candid::Principal; use dfx_core::config::model::canister_id_store::CanisterIdStore; use dfx_core::config::model::dfinity::Config; @@ -126,22 +127,27 @@ pub async fn deploy_canisters( config.get_config().get_canister_config(canister_name).map_or( true, |canister_config| canister_config.deploy)) }) - .filter(|canister_name| { - if let Some(canister) = canister_pool.get_first_canister_with_name(canister_name) { - canister + .map(|canister_name| -> DfxResult> { + Ok(if let Some(canister) = canister_pool.get_first_canister_with_name(canister_name.as_str()) { + if canister .builder .should_build( &canister_pool, &canister.info, env.get_cache().as_ref(), env.get_logger(), - ) - .unwrap() // FIXME: `unwrap()` + )? + { + Some(canister_name) + } else { + None + } } else { - false - } + None + }) }) - .collect(); + .filter_map(|v| v.transpose()) + .try_collect()?; if some_canister.is_some() { info!(log, "Deploying: {}", canisters_to_install.join(" ")); From e9e196e589144d89a2b246bc72bfe4d6e0eae423 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 12:41:11 +0300 Subject: [PATCH 185/354] removed a confusing comment --- src/dfx/src/lib/builders/motoko.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index b7ba6ccde6..41af71f831 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -143,7 +143,6 @@ impl CanisterBuilder for MotokoBuilder { &mut pool.imports.borrow_mut(), pool, )?; - // TODO: In some reason, the following line is needed only for `deploy`, not for `build`. 
let graph = &pool.imports.borrow().graph; match petgraph::algo::toposort(&pool.imports.borrow().graph, None) { From 3ef1b1f6f1960de3ba7803a15b45cbb3c77f1f08 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 13:08:35 +0300 Subject: [PATCH 186/354] cargo fmt --- .../operations/canister/deploy_canisters.rs | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index de3dd40184..112c1d7869 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -13,7 +13,6 @@ use crate::lib::operations::canister::motoko_playground::reserve_canister_with_p use crate::lib::operations::canister::{create_canister, install_canister::install_canister}; use crate::util::clap::subnet_selection_opt::SubnetSelectionType; use anyhow::{anyhow, bail, Context}; -use itertools::Itertools; use candid::Principal; use dfx_core::config::model::canister_id_store::CanisterIdStore; use dfx_core::config::model::dfinity::Config; @@ -24,6 +23,7 @@ use ic_utils::interfaces::management_canister::attributes::{ }; use ic_utils::interfaces::management_canister::builders::InstallMode; use icrc_ledger_types::icrc1::account::Subaccount; +use itertools::Itertools; use slog::info; use std::convert::TryFrom; use std::path::{Path, PathBuf}; @@ -128,23 +128,24 @@ pub async fn deploy_canisters( true, |canister_config| canister_config.deploy)) }) .map(|canister_name| -> DfxResult> { - Ok(if let Some(canister) = canister_pool.get_first_canister_with_name(canister_name.as_str()) { - if canister - .builder - .should_build( + Ok( + if let Some(canister) = + canister_pool.get_first_canister_with_name(canister_name.as_str()) + { + if canister.builder.should_build( &canister_pool, &canister.info, env.get_cache().as_ref(), env.get_logger(), - )? - { - Some(canister_name) + )? 
{ + Some(canister_name) + } else { + None + } } else { None - } - } else { - None - }) + }, + ) }) .filter_map(|v| v.transpose()) .try_collect()?; From 1e9d90edbc0d11232287e20654622dc4fc9f59b0 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 13:37:45 +0300 Subject: [PATCH 187/354] bug fix --- src/dfx/src/lib/builders/mod.rs | 35 +++++++++++---------------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index f52318de39..f3950e4452 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -322,30 +322,19 @@ pub trait CanisterBuilder { Import::Lib(_path) => { // Skip libs, all changes by package managers don't modify existing directories but create new ones. continue; - // let i = path.find('/'); - // let pre_path = if let Some(i) = i { - // let expanded = Path::new( - // package_arguments_map.get(&path[..i]).ok_or_else(|| anyhow!("nonexisting package"))? - // ); - // expanded.join(&path[i+1..]) - // } else { - // Path::new(path.as_str()).to_owned() - // }; - // let path2 = pre_path.to_str().unwrap().to_owned() + ".mo"; - // let path2 = path2.to_string(); - // let path2 = Path::new(&path2); - // if path2.exists() { // TODO: Is it correct order of two variants? 
- // Some(Path::new(path2).to_owned()) - // } else { - // let path3 = pre_path.join(Path::new("lib.mo")); - // if path3.exists() { - // Some(path3.to_owned()) - // } else { - // bail!("source file has been deleted"); - // } - // } } - Import::Relative(path) => Some(Path::new(&path).to_owned()), + Import::Relative(path) => { + Some(if path.exists() { + Path::new(path).to_owned() + } else { + let path2 = path.join(Path::new("lib.mo")); + if path2.exists() { + path2.to_owned() + } else { + bail!("source file has been deleted"); + } + }) + } }; if let Some(imported_file) = imported_file { let imported_file_metadata = metadata(&imported_file)?; From d3b07efac87ef6683c9afc0c6a74db75bab2d00e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 14:00:44 +0300 Subject: [PATCH 188/354] fixing bug --- src/dfx/src/lib/builders/mod.rs | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index f3950e4452..8ac5634acc 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -324,20 +324,22 @@ pub trait CanisterBuilder { continue; } Import::Relative(path) => { - Some(if path.exists() { - Path::new(path).to_owned() - } else { - let path2 = path.join(Path::new("lib.mo")); - if path2.exists() { - path2.to_owned() - } else { - bail!("source file has been deleted"); - } - }) + Some(Path::new(path).to_owned()) + // FIXME: Need to check the full path. + // Some(if path.exists() { + // Path::new(path).to_owned() + // } else { + // let path2 = path.join(Path::new("lib.mo")); + // if path2.exists() { + // path2.to_owned() + // } else { + // bail!("source file has been deleted"); + // } + // }) } }; if let Some(imported_file) = imported_file { - let imported_file_metadata = metadata(&imported_file)?; + let imported_file_metadata = metadata(&imported_file)?; // FIXME: Need to check the full path. 
let imported_file_time = imported_file_metadata.modified()?; if imported_file_time > wasm_file_time { break; From 41b19d165b3a3f1e5fdb3e55d24d04df80465ad5 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 14:01:17 +0300 Subject: [PATCH 189/354] code reorganized --- e2e/tests-dfx/make_like.bash | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 046adb2a79..7ebf4b91b8 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -48,8 +48,8 @@ teardown() { touch lib.mo assert_command dfx deploy -vv dependency - assert_contains '"moc-wrapper" "dependency.mo"' assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' } @test "trying to break dependency compiling: build" { @@ -87,6 +87,6 @@ teardown() { touch lib.mo assert_command dfx build -vv dependency - assert_contains '"moc-wrapper" "dependency.mo"' assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' } From 5f62f4a0275ab0fae9c26330defd0cff516845c7 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 14:01:41 +0300 Subject: [PATCH 190/354] temporarily disabled a test --- e2e/tests-dfx/make_like.bash | 77 ++++++++++++++++++------------------ 1 file changed, 39 insertions(+), 38 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 7ebf4b91b8..3c9d15f9fc 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -52,41 +52,42 @@ teardown() { assert_contains '"moc-wrapper" "dependency.mo"' } -@test "trying to break dependency compiling: build" { - dfx_start - - assert_command dfx canister create dependency - assert_command dfx canister create dependent - assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - touch dependent.mo - assert_command dfx 
build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' - - touch dependency.mo - assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - touch dependency.mo - assert_command dfx build -vv dependency - assert_not_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' - - touch lib.mo - assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - touch lib.mo - assert_command dfx build -vv dependency - assert_not_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' -} +# FIXME: Uncomment. +# @test "trying to break dependency compiling: build" { +# dfx_start + +# assert_command dfx canister create dependency +# assert_command dfx canister create dependent +# assert_command dfx build -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# touch dependent.mo +# assert_command dfx build -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_not_contains '"moc-wrapper" "dependency.mo"' + +# touch dependency.mo +# assert_command dfx build -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# touch dependency.mo +# assert_command dfx build -vv dependency +# assert_not_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# assert_command dfx build -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_not_contains '"moc-wrapper" "dependency.mo"' + +# touch lib.mo +# assert_command dfx build -vv dependent +# assert_contains 
'"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# touch lib.mo +# assert_command dfx build -vv dependency +# assert_not_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' +# } From 293c1203b511a7e3b2b41f35b4fa83995e9cb7f2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 15:00:58 +0300 Subject: [PATCH 191/354] refactor --- src/dfx/src/lib/models/canister.rs | 47 +++++++++++++++--------------- 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 1c144711b1..90017c85ca 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -880,45 +880,44 @@ impl CanisterPool { trace!(log, "Not building canister '{}'.", canister.get_name()); continue; } - if !canister.builder.should_build( + if canister.builder.should_build( self, &canister.info, env.get_cache().as_ref(), env.get_logger(), )? 
{ - continue; - } - - result.push( - self.step_prebuild(build_config, canister) - .map_err(|e| { - BuildError::PreBuildStepFailed( - *canister_id, - canister.get_name().to_string(), - Box::new(e), - ) - }) - .and_then(|_| { - self.step_build(build_config, canister).map_err(|e| { - BuildError::BuildStepFailed( + result.push( + self.step_prebuild(build_config, canister) + .map_err(|e| { + BuildError::PreBuildStepFailed( *canister_id, canister.get_name().to_string(), Box::new(e), ) }) - }) - .and_then(|o| { - self.step_postbuild(build_config, canister, o) - .map_err(|e| { - BuildError::PostBuildStepFailed( + .and_then(|_| { + self.step_build(build_config, canister).map_err(|e| { + BuildError::BuildStepFailed( *canister_id, canister.get_name().to_string(), Box::new(e), ) }) - .map(|_| o) - }), - ); + }) + .and_then(|o| { + self.step_postbuild(build_config, canister, o) + .map_err(|e| { + BuildError::PostBuildStepFailed( + *canister_id, + canister.get_name().to_string(), + Box::new(e), + ) + }) + .map(|_| o) + }), + ); + } + println!("YYY: {}", canister.info.get_name()); } } From 2f31661b1d61c23981431a827a06ec812481843b Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 16:07:16 +0300 Subject: [PATCH 192/354] bug fixes --- src/dfx/src/lib/builders/mod.rs | 50 ++++++++++++++++++++------------- 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 8ac5634acc..c9df3fe1d8 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -267,11 +267,11 @@ pub trait CanisterBuilder { let output_wasm_path = canister_info.get_output_wasm_path(); // from principal to name: - let rev_id_map: BTreeMap = pool - .get_canister_list() - .iter() - .map(|&c| (c.canister_id().to_text(), c.get_name().to_string())) - .collect(); + // let rev_id_map: BTreeMap = pool + // .get_canister_list() + // .iter() + // .map(|&c| (c.canister_id().to_text(), c.get_name().to_string())) 
+ // .collect(); self.read_dependencies(pool, canister_info, cache)?; @@ -288,11 +288,17 @@ pub trait CanisterBuilder { panic!("programming error"); }; let mut import_iter = Bfs::new(&imports.graph, start); + let mut top_level = true; // the first canister is our own canister and therefore is a dependency. loop { + let top_level1 = top_level; + top_level = false; if let Some(import) = import_iter.next(&imports.graph) { let subnode = &imports.graph[import]; let imported_file = match subnode { Import::Canister(canister_name) => { + if !top_level1 { + continue; + } // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) @@ -303,21 +309,22 @@ pub trait CanisterBuilder { None } } - Import::Ic(canister_id) => { - if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { - if let Some(canister) = - pool.get_first_canister_with_name(canister_name) - { - canister - .get_info() - .get_main_file() - .map(|main_file| main_file.to_owned()) - } else { - None - } - } else { - None - } + Import::Ic(_canister_id) => { + continue; + // if let Some(canister_name) = rev_id_map.get(canister_id.as_str()) { + // if let Some(canister) = + // pool.get_first_canister_with_name(canister_name) + // { + // canister + // .get_info() + // .get_main_file() + // .map(|main_file| main_file.to_owned()) + // } else { + // None + // } + // } else { + // None + // } } Import::Lib(_path) => { // Skip libs, all changes by package managers don't modify existing directories but create new ones. @@ -341,6 +348,9 @@ pub trait CanisterBuilder { if let Some(imported_file) = imported_file { let imported_file_metadata = metadata(&imported_file)?; // FIXME: Need to check the full path. let imported_file_time = imported_file_metadata.modified()?; + println!("XXX: {} {:?} <= {}", // FIXME: Remove. 
+ imported_file_time > wasm_file_time, subnode, output_wasm_path.to_str().unwrap(), + ); if imported_file_time > wasm_file_time { break; }; From f9a42280f40dc46f0ba2dcc3a3a3a6019ead2a46 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 16:09:37 +0300 Subject: [PATCH 193/354] bug fixes --- e2e/tests-dfx/make_like.bash | 77 +++++++++---------- src/dfx/src/commands/build.rs | 26 ++++--- src/dfx/src/lib/builders/mod.rs | 12 +-- src/dfx/src/lib/models/canister.rs | 25 ++++-- .../operations/canister/deploy_canisters.rs | 4 +- 5 files changed, 81 insertions(+), 63 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 3c9d15f9fc..7ebf4b91b8 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -52,42 +52,41 @@ teardown() { assert_contains '"moc-wrapper" "dependency.mo"' } -# FIXME: Uncomment. -# @test "trying to break dependency compiling: build" { -# dfx_start - -# assert_command dfx canister create dependency -# assert_command dfx canister create dependent -# assert_command dfx build -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# touch dependent.mo -# assert_command dfx build -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_not_contains '"moc-wrapper" "dependency.mo"' - -# touch dependency.mo -# assert_command dfx build -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# touch dependency.mo -# assert_command dfx build -vv dependency -# assert_not_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# assert_command dfx build -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_not_contains '"moc-wrapper" "dependency.mo"' - -# touch lib.mo -# assert_command dfx build -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" 
"dependency.mo"' - -# touch lib.mo -# assert_command dfx build -vv dependency -# assert_not_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' -# } +@test "trying to break dependency compiling: build" { + dfx_start + + assert_command dfx canister create dependency + assert_command dfx canister create dependent + assert_command dfx build -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + touch dependent.mo + assert_command dfx build -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' + + touch dependency.mo + assert_command dfx build -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + touch dependency.mo + assert_command dfx build -vv dependency + assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + assert_command dfx build -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' + + touch lib.mo + assert_command dfx build -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + touch lib.mo + assert_command dfx build -vv dependency + assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' +} diff --git a/src/dfx/src/commands/build.rs b/src/dfx/src/commands/build.rs index 250fb74168..7451bcc33b 100644 --- a/src/dfx/src/commands/build.rs +++ b/src/dfx/src/commands/build.rs @@ -54,15 +54,15 @@ pub fn exec(env1: &dyn Environment, opts: CanisterBuildOpts) -> DfxResult { .get_canister_names_with_dependencies(opts.canister_name.as_deref())?; let canisters_to_load = add_canisters_with_ids(&required_canisters, &env, &config); - let canisters_to_build = required_canisters - .into_iter() - .filter(|canister_name| { - !config - 
.get_config() - .is_remote_canister(canister_name, &env.get_network_descriptor().name) - .unwrap_or(false) - }) - .collect(); + // let canisters_to_build = required_canisters + // .into_iter() + // .filter(|canister_name| { + // !config + // .get_config() + // .is_remote_canister(canister_name, &env.get_network_descriptor().name) + // .unwrap_or(false) + // }) + // .collect(); let canister_pool = CanisterPool::load(&env, build_mode_check, &canisters_to_load)?; @@ -88,7 +88,13 @@ pub fn exec(env1: &dyn Environment, opts: CanisterBuildOpts) -> DfxResult { let build_config = BuildConfig::from_config(&config, env.get_network_descriptor().is_playground())? .with_build_mode_check(build_mode_check) - .with_canisters_to_build(canisters_to_build) + .with_canisters_to_build(if let Some(canister) = opts.canister_name { + vec![canister] // hacky // TODO: Give sensible names to `*canisters_to_build` variables. + } else { + config.get_config().get_canister_names_with_dependencies(None)? + // canister_pool.get_canister_list().iter().map(|&canister| canister.get_name().to_owned()) // hacky + // .collect() + }) .with_env_file(env_file); runtime.block_on(canister_pool.build_or_fail(env1, logger, &build_config))?; diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index c9df3fe1d8..575a021eb8 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -288,17 +288,17 @@ pub trait CanisterBuilder { panic!("programming error"); }; let mut import_iter = Bfs::new(&imports.graph, start); - let mut top_level = true; // the first canister is our own canister and therefore is a dependency. + // let mut top_level = true; // the first canister is our own canister and therefore is a dependency. 
loop { - let top_level1 = top_level; - top_level = false; + // let top_level1 = top_level; + // top_level = false; if let Some(import) = import_iter.next(&imports.graph) { let subnode = &imports.graph[import]; let imported_file = match subnode { Import::Canister(canister_name) => { - if !top_level1 { - continue; - } + // if !top_level1 { + // continue; + // } // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 90017c85ca..14a07bed55 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -854,14 +854,24 @@ impl CanisterPool { log: &Logger, build_config: &BuildConfig, ) -> DfxResult>> { + println!("ORDX.len(): {}", build_config.canisters_to_build.as_ref().unwrap().len()); // FIXME: Remove. let order = self.build_order(env, &build_config.canisters_to_build.clone())?; // TODO: Eliminate `clone`. + println!("ORD.len(): {}", order.len()); // FIXME: Remove. - // TODO: The next line is slow and confusing code. - let canisters_to_build: Vec<&Arc> = self - .canisters - .iter() - .filter(|c| order.contains(&c.canister_id())) - .collect(); + // TODO: The next statement is slow and confusing code. + let canisters_to_build: Vec<&Arc> = if let Some(canisters) = build_config.canisters_to_build.clone() { + self + .canisters + .iter() + .filter(|c| canisters.contains(&c.get_name().to_string())) + .collect() + } else { + self + .canisters + .iter() + // .filter(|c| order.contains(&c.canister_id())) + .collect() + }; self.step_prebuild_all(log, build_config, canisters_to_build.as_slice()) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; @@ -886,6 +896,7 @@ impl CanisterPool { env.get_cache().as_ref(), env.get_logger(), )? { + println!("AAA: {}", canister.info.get_name()); // FIXME: Remove. 
result.push( self.step_prebuild(build_config, canister) .map_err(|e| { @@ -917,7 +928,7 @@ impl CanisterPool { }), ); } - println!("YYY: {}", canister.info.get_name()); + println!("YYY: {}", canister.info.get_name()); // FIXME: Remove. } } diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 112c1d7869..0e45660610 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -177,10 +177,11 @@ pub async fn deploy_canisters( info!(env.get_logger(), "All canisters have already been created."); } + println!("RRR: {:?}", &canisters_to_build); build_canisters( env, // &order_names, - &order_names, + &canisters_to_build, &config, env_file.clone(), &canister_pool, @@ -334,6 +335,7 @@ async fn build_canisters( // let build_mode_check = false; // let canister_pool = CanisterPool::load(env, build_mode_check, canisters_to_load)?; + println!("TTT: {:?}", canisters_to_build); let build_config = BuildConfig::from_config(config, env.get_network_descriptor().is_playground())? 
.with_canisters_to_build(canisters_to_build.into()) From c3885473fb9f5d38646928e5e633c3475181e953 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 16:15:05 +0300 Subject: [PATCH 194/354] bug fix --- src/dfx/src/lib/models/canister.rs | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 14a07bed55..fdf1b2e659 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -879,17 +879,12 @@ impl CanisterPool { let mut result = Vec::new(); for canister_id in &order { if let Some(canister) = self.get_canister(canister_id) { - if canisters_to_build - .iter() - .map(|c| c.get_name()) - .contains(&canister.get_name()) - // TODO: slow - { - trace!(log, "Building canister '{}'.", canister.get_name()); - } else { - trace!(log, "Not building canister '{}'.", canister.get_name()); - continue; - } + trace!(log, "Building canister '{}'.", canister.get_name()); + // TODO: + // } else { + // trace!(log, "Not building canister '{}'.", canister.get_name()); + // continue; + // } if canister.builder.should_build( self, &canister.info, From 948bcc1f10b994cea5a923c84370bad082436135 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 16:28:01 +0300 Subject: [PATCH 195/354] variables renamed --- src/dfx/src/commands/build.rs | 2 +- src/dfx/src/commands/generate.rs | 2 +- src/dfx/src/lib/builders/mod.rs | 8 ++-- src/dfx/src/lib/models/canister.rs | 41 +++++++++---------- .../operations/canister/deploy_canisters.rs | 16 ++++---- 5 files changed, 34 insertions(+), 35 deletions(-) diff --git a/src/dfx/src/commands/build.rs b/src/dfx/src/commands/build.rs index 7451bcc33b..7c880d55e5 100644 --- a/src/dfx/src/commands/build.rs +++ b/src/dfx/src/commands/build.rs @@ -89,7 +89,7 @@ pub fn exec(env1: &dyn Environment, opts: CanisterBuildOpts) -> DfxResult { BuildConfig::from_config(&config, 
env.get_network_descriptor().is_playground())? .with_build_mode_check(build_mode_check) .with_canisters_to_build(if let Some(canister) = opts.canister_name { - vec![canister] // hacky // TODO: Give sensible names to `*canisters_to_build` variables. + vec![canister] // hacky } else { config.get_config().get_canister_names_with_dependencies(None)? // canister_pool.get_canister_list().iter().map(|&canister| canister.get_name().to_owned()) // hacky diff --git a/src/dfx/src/commands/generate.rs b/src/dfx/src/commands/generate.rs index 46cd43e5f4..438c31d373 100644 --- a/src/dfx/src/commands/generate.rs +++ b/src/dfx/src/commands/generate.rs @@ -66,7 +66,7 @@ pub fn exec(env1: &dyn Environment, opts: GenerateOpts) -> DfxResult { .with_canisters_to_build(canisters_to_generate); if build_config - .canisters_to_build + .user_specified_canisters .as_ref() .map(|v| !v.is_empty()) .unwrap_or(false) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 575a021eb8..1ff6f2abe3 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -641,9 +641,9 @@ pub struct BuildConfig { pub lsp_root: PathBuf, /// The root for all build files. pub build_root: PathBuf, - /// If only a subset of canisters should be built, then canisters_to_build contains these canisters' names. + /// If only a subset of canisters should be built, then user_specified_canisters contains these canisters' names. /// If all canisters should be built, then this is None. - pub canisters_to_build: Option>, + pub user_specified_canisters: Option>, /// If environment variables should be output to a `.env` file, `env_file` is set to its path. 
pub env_file: Option, } @@ -664,7 +664,7 @@ impl BuildConfig { build_root: canister_root.clone(), idl_root: canister_root.join("idl/"), // TODO: possibly move to `network_root.join("idl/")` lsp_root: network_root.join("lsp/"), - canisters_to_build: None, + user_specified_canisters: None, env_file: config.get_output_env_file(None)?, }) } @@ -678,7 +678,7 @@ impl BuildConfig { pub fn with_canisters_to_build(self, canisters: Vec) -> Self { Self { - canisters_to_build: Some(canisters), + user_specified_canisters: Some(canisters), ..self } } diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index fdf1b2e659..bf38ec5983 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -559,15 +559,15 @@ impl CanisterPool { &self.logger } - /// Build only dependencies relevant for `canisters_to_build`. + /// Build only dependencies relevant for `user_specified_canisters`. #[context("Failed to build dependencies graph for canister pool.")] fn build_dependencies_graph( &self, - canisters_to_build: Option>, + user_specified_canisters: Option>, cache: &dyn Cache, ) -> DfxResult> { - let real_canisters_to_build: Vec<_> = match canisters_to_build { - Some(ref canisters_to_build) => canisters_to_build.clone(), // TODO: Remove `clone()` + let toplevel_canisters: Vec<_> = match user_specified_canisters { + Some(ref user_specified_canisters) => user_specified_canisters.clone(), // TODO: Remove `clone()` None => self .canisters .iter() @@ -577,7 +577,7 @@ impl CanisterPool { for canister in &self.canisters { // a little inefficient - let contains = real_canisters_to_build + let contains = toplevel_canisters .iter() .contains(&canister.get_info().get_name().to_string()); if contains { @@ -593,7 +593,7 @@ impl CanisterPool { let source_graph = &self.imports.borrow().graph; let source_ids = &self.imports.borrow().nodes; - let start: Vec<_> = real_canisters_to_build + let start: Vec<_> = toplevel_canisters .iter() .map(|name| 
Import::Canister(name.clone())) .collect(); @@ -673,8 +673,8 @@ impl CanisterPool { Ok(dest_graph) } - fn canister_dependencies(&self, canisters_to_build: &[&Arc]) -> Vec> { - let iter = canisters_to_build + fn canister_dependencies(&self, toplevel_canisters: &[&Arc]) -> Vec> { + let iter = toplevel_canisters .iter() .flat_map(|&canister| { // TODO: Is `unwrap` on the next line legit? @@ -707,17 +707,16 @@ impl CanisterPool { iter.values().cloned().collect() } - /// TODO: Duplicate entity domain `canisters_to_build` and `build_config.canisters_to_build`. #[context("Failed step_prebuild_all.")] fn step_prebuild_all( &self, log: &Logger, build_config: &BuildConfig, - canisters_to_build: &[&Arc], + toplevel_canisters: &[&Arc], ) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. // Copy .did files into this temporary directory. - for canister in self.canister_dependencies(canisters_to_build) { + for canister in self.canister_dependencies(toplevel_canisters) { let maybe_from = if let Some(remote_candid) = canister.info.get_remote_candid() { Some(remote_candid) } else { @@ -803,11 +802,11 @@ impl CanisterPool { &self, build_config: &BuildConfig, _order: &[CanisterId], - canisters_to_build: &[&Arc], + toplevel_canisters: &[&Arc], ) -> DfxResult<()> { // We don't want to simply remove the whole directory, as in the future, // we may want to keep the IDL files downloaded from network. 
- for canister in self.canister_dependencies(canisters_to_build) { + for canister in self.canister_dependencies(toplevel_canisters) { let idl_root = &build_config.idl_root; let canister_id = canister.canister_id(); let idl_file_path = idl_root.join(canister_id.to_text()).with_extension("did"); @@ -822,11 +821,11 @@ impl CanisterPool { pub fn build_order( &self, env: &dyn Environment, - canisters_to_build: &Option>, + user_specified_canisters: &Option>, ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); let graph = - self.build_dependencies_graph(canisters_to_build.clone(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? + self.build_dependencies_graph(user_specified_canisters.clone(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { Some(canister_id) => match self.get_canister_info(canister_id) { @@ -854,12 +853,12 @@ impl CanisterPool { log: &Logger, build_config: &BuildConfig, ) -> DfxResult>> { - println!("ORDX.len(): {}", build_config.canisters_to_build.as_ref().unwrap().len()); // FIXME: Remove. - let order = self.build_order(env, &build_config.canisters_to_build.clone())?; // TODO: Eliminate `clone`. + println!("ORDX.len(): {}", build_config.user_specified_canisters.as_ref().unwrap().len()); // FIXME: Remove. + let order = self.build_order(env, &build_config.user_specified_canisters.clone())?; // TODO: Eliminate `clone`. println!("ORD.len(): {}", order.len()); // FIXME: Remove. // TODO: The next statement is slow and confusing code. 
- let canisters_to_build: Vec<&Arc> = if let Some(canisters) = build_config.canisters_to_build.clone() { + let toplevel_canisters: Vec<&Arc> = if let Some(canisters) = build_config.user_specified_canisters.clone() { self .canisters .iter() @@ -873,7 +872,7 @@ impl CanisterPool { .collect() }; - self.step_prebuild_all(log, build_config, canisters_to_build.as_slice()) + self.step_prebuild_all(log, build_config, toplevel_canisters.as_slice()) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; let mut result = Vec::new(); @@ -927,7 +926,7 @@ impl CanisterPool { } } - self.step_postbuild_all(build_config, &order, canisters_to_build.as_slice()) + self.step_postbuild_all(build_config, &order, toplevel_canisters.as_slice()) .map_err(|e| DfxError::new(BuildError::PostBuildAllStepFailed(Box::new(e))))?; Ok(result) @@ -1019,7 +1018,7 @@ impl CanisterPool { } pub fn canisters_to_build(&self, build_config: &BuildConfig) -> Vec<&Arc> { - if let Some(canister_names) = &build_config.canisters_to_build { + if let Some(canister_names) = &build_config.user_specified_canisters { self.canisters .iter() .filter(|can| canister_names.contains(&can.info.get_name().to_string())) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 0e45660610..60611a5c80 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -84,7 +84,7 @@ pub async fn deploy_canisters( let canisters_to_load = add_canisters_with_ids(&required_canisters, env, &config); let canister_pool = CanisterPool::load(env, true, &canisters_to_load)?; - let canisters_to_build = match deploy_mode { + let toplevel_canisters = match deploy_mode { PrepareForProposal(canister_name) | ComputeEvidence(canister_name) => { vec![canister_name.clone()] } @@ -105,7 +105,7 @@ pub async fn deploy_canisters( }; // TODO: `build_order` is called two times during 
deployment of a new canister. - let order = canister_pool.build_order(env, &Some(canisters_to_build.clone()))?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. + let order = canister_pool.build_order(env, &Some(toplevel_canisters.clone()))?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. let order_names: Vec = order .iter() .map(|canister| { @@ -117,7 +117,7 @@ pub async fn deploy_canisters( }) .collect(); - let canisters_to_install: &Vec = &canisters_to_build + let canisters_to_install: &Vec = &toplevel_canisters .clone() .into_iter() .filter(|canister_name| { @@ -177,11 +177,11 @@ pub async fn deploy_canisters( info!(env.get_logger(), "All canisters have already been created."); } - println!("RRR: {:?}", &canisters_to_build); + println!("RRR: {:?}", &toplevel_canisters); build_canisters( env, // &order_names, - &canisters_to_build, + &toplevel_canisters, &config, env_file.clone(), &canister_pool, @@ -325,7 +325,7 @@ async fn register_canisters( async fn build_canisters( env: &dyn Environment, // canisters_to_load: &[String], - canisters_to_build: &[String], + toplevel_canisters: &[String], config: &Config, env_file: Option, canister_pool: &CanisterPool, @@ -335,10 +335,10 @@ async fn build_canisters( // let build_mode_check = false; // let canister_pool = CanisterPool::load(env, build_mode_check, canisters_to_load)?; - println!("TTT: {:?}", canisters_to_build); + println!("TTT: {:?}", toplevel_canisters); let build_config = BuildConfig::from_config(config, env.get_network_descriptor().is_playground())? 
- .with_canisters_to_build(canisters_to_build.into()) + .with_canisters_to_build(toplevel_canisters.into()) .with_env_file(env_file); canister_pool.build_or_fail(env, log, &build_config).await?; Ok(()) From 09d4aa15d969c030b392e423e3660e97cee256b2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 17:11:07 +0300 Subject: [PATCH 196/354] refactor --- src/dfx/src/lib/models/canister.rs | 40 ++++++++++++------------------ 1 file changed, 16 insertions(+), 24 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index bf38ec5983..61338692b3 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -563,23 +563,15 @@ impl CanisterPool { #[context("Failed to build dependencies graph for canister pool.")] fn build_dependencies_graph( &self, - user_specified_canisters: Option>, + toplevel_canisters: &Vec>, cache: &dyn Cache, ) -> DfxResult> { - let toplevel_canisters: Vec<_> = match user_specified_canisters { - Some(ref user_specified_canisters) => user_specified_canisters.clone(), // TODO: Remove `clone()` - None => self - .canisters - .iter() - .map(|canister| canister.get_name().to_string()) - .collect(), - }; - for canister in &self.canisters { // a little inefficient let contains = toplevel_canisters .iter() - .contains(&canister.get_info().get_name().to_string()); + .map(|canister| canister.get_info().get_name()) + .contains(&canister.get_info().get_name()); if contains { let canister_info = &canister.info; // TODO: Ignored return value is a hack. 
@@ -595,7 +587,7 @@ impl CanisterPool { let source_ids = &self.imports.borrow().nodes; let start: Vec<_> = toplevel_canisters .iter() - .map(|name| Import::Canister(name.clone())) + .map(|canister| Import::Canister(canister.get_name().to_string())) .collect(); let start: Vec<_> = start .into_iter() @@ -673,10 +665,10 @@ impl CanisterPool { Ok(dest_graph) } - fn canister_dependencies(&self, toplevel_canisters: &[&Arc]) -> Vec> { + fn canister_dependencies(&self, toplevel_canisters: &[Arc]) -> Vec> { let iter = toplevel_canisters .iter() - .flat_map(|&canister| { + .flat_map(|canister| { // TODO: Is `unwrap` on the next line legit? let parent_node = *self .imports @@ -712,7 +704,7 @@ impl CanisterPool { &self, log: &Logger, build_config: &BuildConfig, - toplevel_canisters: &[&Arc], + toplevel_canisters: &[Arc], ) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. // Copy .did files into this temporary directory. @@ -802,7 +794,7 @@ impl CanisterPool { &self, build_config: &BuildConfig, _order: &[CanisterId], - toplevel_canisters: &[&Arc], + toplevel_canisters: &[Arc], ) -> DfxResult<()> { // We don't want to simply remove the whole directory, as in the future, // we may want to keep the IDL files downloaded from network. @@ -821,11 +813,11 @@ impl CanisterPool { pub fn build_order( &self, env: &dyn Environment, - user_specified_canisters: &Option>, + toplevel_canisters: &Vec>, ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); let graph = - self.build_dependencies_graph(user_specified_canisters.clone(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? + self.build_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? 
let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { let message = match graph.node_weight(cycle.node_id()) { Some(canister_id) => match self.get_canister_info(canister_id) { @@ -853,24 +845,24 @@ impl CanisterPool { log: &Logger, build_config: &BuildConfig, ) -> DfxResult>> { - println!("ORDX.len(): {}", build_config.user_specified_canisters.as_ref().unwrap().len()); // FIXME: Remove. - let order = self.build_order(env, &build_config.user_specified_canisters.clone())?; // TODO: Eliminate `clone`. - println!("ORD.len(): {}", order.len()); // FIXME: Remove. - // TODO: The next statement is slow and confusing code. - let toplevel_canisters: Vec<&Arc> = if let Some(canisters) = build_config.user_specified_canisters.clone() { + let toplevel_canisters: Vec> = if let Some(canisters) = build_config.user_specified_canisters.clone() { self .canisters .iter() .filter(|c| canisters.contains(&c.get_name().to_string())) + .map(|canister| canister.clone()) .collect() } else { self .canisters .iter() - // .filter(|c| order.contains(&c.canister_id())) + .map(|canister| canister.clone()) .collect() }; + println!("ORDX.len(): {}", build_config.user_specified_canisters.as_ref().unwrap().len()); // FIXME: Remove. + let order = self.build_order(env, &toplevel_canisters.clone())?; // TODO: Eliminate `clone`. + println!("ORD.len(): {}", order.len()); // FIXME: Remove. 
self.step_prebuild_all(log, build_config, toplevel_canisters.as_slice()) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; From 428ecfdcd5f8227a83086190385ce4de90004cc1 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 17:16:38 +0300 Subject: [PATCH 197/354] misc --- .../operations/canister/deploy_canisters.rs | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 60611a5c80..efc8c25f03 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -5,7 +5,7 @@ use crate::lib::environment::Environment; use crate::lib::error::DfxResult; use crate::lib::ic_attributes::CanisterSettings; use crate::lib::installers::assets::prepare_assets_for_proposal; -use crate::lib::models::canister::CanisterPool; +use crate::lib::models::canister::{Canister, CanisterPool}; use crate::lib::operations::canister::deploy_canisters::DeployMode::{ ComputeEvidence, ForceReinstallSingleCanister, NormalDeploy, PrepareForProposal, }; @@ -27,6 +27,7 @@ use itertools::Itertools; use slog::info; use std::convert::TryFrom; use std::path::{Path, PathBuf}; +use std::sync::Arc; use super::add_canisters_with_ids; @@ -103,9 +104,16 @@ pub async fn deploy_canisters( }) .collect(), }; + let toplevel_canisters = toplevel_canisters.into_iter() + .map(|name: String| -> DfxResult<_> { + Ok(canister_pool.get_first_canister_with_name(name.as_str()) + .ok_or_else(|| anyhow!("A canister with the name '{}' was not found in the current project.", name.clone()))? + ) + }) + .try_collect()?; // TODO: `build_order` is called two times during deployment of a new canister. - let order = canister_pool.build_order(env, &Some(toplevel_canisters.clone()))?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. 
+ let order = canister_pool.build_order(env, &toplevel_canisters)?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. let order_names: Vec = order .iter() .map(|canister| { @@ -117,7 +125,7 @@ pub async fn deploy_canisters( }) .collect(); - let canisters_to_install: &Vec = &toplevel_canisters + let canisters_to_install: &Vec = &order_names .clone() .into_iter() .filter(|canister_name| { @@ -177,11 +185,10 @@ pub async fn deploy_canisters( info!(env.get_logger(), "All canisters have already been created."); } - println!("RRR: {:?}", &toplevel_canisters); build_canisters( env, // &order_names, - &toplevel_canisters, + &toplevel_canisters.as_slice(), &config, env_file.clone(), &canister_pool, @@ -325,7 +332,7 @@ async fn register_canisters( async fn build_canisters( env: &dyn Environment, // canisters_to_load: &[String], - toplevel_canisters: &[String], + toplevel_canisters: &[Arc], config: &Config, env_file: Option, canister_pool: &CanisterPool, @@ -335,10 +342,10 @@ async fn build_canisters( // let build_mode_check = false; // let canister_pool = CanisterPool::load(env, build_mode_check, canisters_to_load)?; - println!("TTT: {:?}", toplevel_canisters); let build_config = BuildConfig::from_config(config, env.get_network_descriptor().is_playground())? 
- .with_canisters_to_build(toplevel_canisters.into()) + .with_canisters_to_build( + toplevel_canisters.iter().map(|canister| canister.get_name().to_string()).collect()) // hack .with_env_file(env_file); canister_pool.build_or_fail(env, log, &build_config).await?; Ok(()) From 9bc54eead6af7e2ecbde9854a01942bd1cc1cc82 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 18:14:49 +0300 Subject: [PATCH 198/354] bug fix --- e2e/tests-dfx/make_like.bash | 73 +++++++++++++++--------------- src/dfx/src/lib/builders/mod.rs | 3 +- src/dfx/src/lib/models/canister.rs | 32 ++++++++----- 3 files changed, 59 insertions(+), 49 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 7ebf4b91b8..92b08cd75d 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -15,42 +15,43 @@ teardown() { standard_teardown } -@test "trying to break dependency compiling: deploy" { - dfx_start - - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - touch dependent.mo - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' - - touch dependency.mo - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - touch dependency.mo - assert_command dfx deploy -vv dependency - assert_not_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' - - touch lib.mo - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - touch lib.mo - assert_command dfx deploy -vv dependency - assert_not_contains '"moc-wrapper" 
"dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' -} +# FIXME: Uncomment. +# @test "trying to break dependency compiling: deploy" { +# dfx_start + +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# touch dependent.mo +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_not_contains '"moc-wrapper" "dependency.mo"' + +# touch dependency.mo +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# touch dependency.mo +# assert_command dfx deploy -vv dependency +# assert_not_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_not_contains '"moc-wrapper" "dependency.mo"' + +# touch lib.mo +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# touch lib.mo +# assert_command dfx deploy -vv dependency +# assert_not_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' +# } @test "trying to break dependency compiling: build" { dfx_start diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 1ff6f2abe3..8b61c2b00a 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -348,7 +348,8 @@ pub trait CanisterBuilder { if let Some(imported_file) = imported_file { let imported_file_metadata = metadata(&imported_file)?; // FIXME: Need to check the full path. let imported_file_time = imported_file_metadata.modified()?; - println!("XXX: {} {:?} <= {}", // FIXME: Remove. + println!( + "XXX: {} {:?} <= {}", // FIXME: Remove. 
imported_file_time > wasm_file_time, subnode, output_wasm_path.to_str().unwrap(), ); if imported_file_time > wasm_file_time { diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 61338692b3..adb07daf1a 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -23,7 +23,7 @@ use ic_wasm::metadata::{add_metadata, remove_metadata, Kind}; use ic_wasm::optimize::OptLevel; use itertools::Itertools; use petgraph::graph::{DiGraph, NodeIndex}; -use petgraph::visit::Bfs; +use petgraph::visit::{Bfs, Dfs}; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; @@ -542,6 +542,7 @@ impl CanisterPool { self.canisters.iter().map(|c| c.as_ref()).collect() } + #[allow(unused)] // TODO pub fn get_canister_info(&self, canister_id: &CanisterId) -> Option<&CanisterInfo> { self.get_canister(canister_id).map(|c| &c.info) } @@ -818,17 +819,24 @@ impl CanisterPool { trace!(env.get_logger(), "Building dependencies graph."); let graph = self.build_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? 
- let nodes = petgraph::algo::toposort(&graph, None).map_err(|cycle| { - let message = match graph.node_weight(cycle.node_id()) { - Some(canister_id) => match self.get_canister_info(canister_id) { - Some(info) => info.get_name().to_string(), - None => format!("<{}>", canister_id.to_text()), - }, - None => "".to_string(), - }; - BuildError::DependencyError(format!("Found circular dependency: {}", message)) - })?; - Ok(nodes + + let toplevel_nodes = toplevel_canisters.iter().map( + |canister| self.imports.borrow().nodes.get(&Import::Canister(canister.get_name().to_string())).unwrap().clone()); + + // Make topological order of our nodes: + let mut nodes2 = Vec::new(); + let mut visited = HashMap::new(); + for node in toplevel_nodes { + if !visited.contains_key(&node) { + let mut dfs = Dfs::new(&graph, node); + while let Some(subnode) = dfs.next(&graph) { + nodes2.push(subnode); + visited.insert(subnode, ()); + } + } + } + + Ok(nodes2 .iter() .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. .map(|idx| *graph.node_weight(*idx).unwrap()) From b3a869734e7e85f3f5b11c1f5295170e63f1ecdf Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 18:26:33 +0300 Subject: [PATCH 199/354] function renamed --- src/dfx/src/lib/models/canister.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index adb07daf1a..86428085e5 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -562,7 +562,7 @@ impl CanisterPool { /// Build only dependencies relevant for `user_specified_canisters`. 
#[context("Failed to build dependencies graph for canister pool.")] - fn build_dependencies_graph( + fn build_canister_dependencies_graph( &self, toplevel_canisters: &Vec>, cache: &dyn Cache, @@ -818,7 +818,7 @@ impl CanisterPool { ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); let graph = - self.build_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? + self.build_canister_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? let toplevel_nodes = toplevel_canisters.iter().map( |canister| self.imports.borrow().nodes.get(&Import::Canister(canister.get_name().to_string())).unwrap().clone()); @@ -829,7 +829,7 @@ impl CanisterPool { for node in toplevel_nodes { if !visited.contains_key(&node) { let mut dfs = Dfs::new(&graph, node); - while let Some(subnode) = dfs.next(&graph) { + while let Some(subnode) = dfs.next(&graph) { // FIXME nodes2.push(subnode); visited.insert(subnode, ()); } From 65dcb236db3e29cd9b3413e28b5ec85448bf18b2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 18:35:48 +0300 Subject: [PATCH 200/354] bug fix --- src/dfx/src/lib/models/canister.rs | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 86428085e5..8bf39079f9 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -566,7 +566,7 @@ impl CanisterPool { &self, toplevel_canisters: &Vec>, cache: &dyn Cache, - ) -> DfxResult> { + ) -> DfxResult<(DiGraph, HashMap)> { for canister in &self.canisters { // a little inefficient let contains = toplevel_canisters @@ -603,7 +603,8 @@ impl CanisterPool { // Transform the graph of file dependencies to graph of canister dependencies. // For this do DFS for each of `real_canisters_to_build`. 
let mut dest_graph: DiGraph = DiGraph::new(); - let mut dest_id_set = HashMap::new(); + let mut dest_id_to_source_id = HashMap::new(); + let mut dest_nodes = HashMap::new(); for start_node in start.into_iter() { // Initialize "mirrors" of the parent node of source graph in dest graph: let parent = source_graph.node_weight(start_node).unwrap(); @@ -617,7 +618,7 @@ impl CanisterPool { .get_first_canister_with_name(parent_name) .unwrap() .canister_id(); - dest_id_set + dest_id_to_source_id .entry(start_node) .or_insert_with(|| dest_graph.add_node(parent_canister)); @@ -654,8 +655,10 @@ impl CanisterPool { .ok_or_else(|| anyhow!("A canister with the name '{}' was not found in the current project.", child_name.clone()))? .canister_id(); - let dest_parent_id = *dest_id_set.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); - let dest_child_id = *dest_id_set.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); + let dest_parent_id = *dest_id_to_source_id.entry(source_parent_id).or_insert_with(|| dest_graph.add_node(parent_canister)); + let dest_child_id = *dest_id_to_source_id.entry(source_child_id).or_insert_with(|| dest_graph.add_node(child_canister)); + dest_nodes.insert(parent_canister, dest_parent_id); + dest_nodes.insert(child_canister, dest_child_id); dest_graph.update_edge(dest_parent_id, dest_child_id, ()); Ok(()) @@ -663,7 +666,7 @@ impl CanisterPool { )?; } - Ok(dest_graph) + Ok((dest_graph, dest_nodes)) } fn canister_dependencies(&self, toplevel_canisters: &[Arc]) -> Vec> { @@ -817,11 +820,11 @@ impl CanisterPool { toplevel_canisters: &Vec>, ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); - let graph = + let (graph, nodes) = self.build_canister_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? 
let toplevel_nodes = toplevel_canisters.iter().map( - |canister| self.imports.borrow().nodes.get(&Import::Canister(canister.get_name().to_string())).unwrap().clone()); + |canister| nodes.get(&canister.canister_id()).unwrap().clone()); // Make topological order of our nodes: let mut nodes2 = Vec::new(); From 66129df6bc9ced2c5fa48b190db23bb44bb25d4a Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 18:39:31 +0300 Subject: [PATCH 201/354] more tests --- e2e/tests-dfx/make_like.bash | 73 ++++++++++++++++++------------------ 1 file changed, 36 insertions(+), 37 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 92b08cd75d..7ebf4b91b8 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -15,43 +15,42 @@ teardown() { standard_teardown } -# FIXME: Uncomment. -# @test "trying to break dependency compiling: deploy" { -# dfx_start - -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# touch dependent.mo -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_not_contains '"moc-wrapper" "dependency.mo"' - -# touch dependency.mo -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# touch dependency.mo -# assert_command dfx deploy -vv dependency -# assert_not_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_not_contains '"moc-wrapper" "dependency.mo"' - -# touch lib.mo -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# touch lib.mo -# assert_command dfx deploy -vv dependency -# assert_not_contains '"moc-wrapper" 
"dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' -# } +@test "trying to break dependency compiling: deploy" { + dfx_start + + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + touch dependent.mo + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' + + touch dependency.mo + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + touch dependency.mo + assert_command dfx deploy -vv dependency + assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' + + touch lib.mo + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + touch lib.mo + assert_command dfx deploy -vv dependency + assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' +} @test "trying to break dependency compiling: build" { dfx_start From 741a6668c33f2c3df5bd9e92a687d7672bc752fa Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 18:58:20 +0300 Subject: [PATCH 202/354] bug fix --- src/dfx/src/lib/models/canister.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 8bf39079f9..86efb95335 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -830,9 +830,9 @@ impl CanisterPool { let mut nodes2 = Vec::new(); let mut visited = HashMap::new(); for node in toplevel_nodes { - if !visited.contains_key(&node) { - let mut dfs = Dfs::new(&graph, node); - while 
let Some(subnode) = dfs.next(&graph) { // FIXME + let mut dfs = Dfs::new(&graph, node); + while let Some(subnode) = dfs.next(&graph) { // FIXME + if !visited.contains_key(&node) { nodes2.push(subnode); visited.insert(subnode, ()); } From 250962a900962de69b791e8129254c6e68e97286 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 19:04:11 +0300 Subject: [PATCH 203/354] removed tracing --- e2e/tests-dfx/make_like.bash | 73 +++++++++++++++--------------- src/dfx/src/lib/models/canister.rs | 4 -- 2 files changed, 37 insertions(+), 40 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 7ebf4b91b8..92b08cd75d 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -15,42 +15,43 @@ teardown() { standard_teardown } -@test "trying to break dependency compiling: deploy" { - dfx_start - - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - touch dependent.mo - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' - - touch dependency.mo - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - touch dependency.mo - assert_command dfx deploy -vv dependency - assert_not_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' - - touch lib.mo - assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' - - touch lib.mo - assert_command dfx deploy -vv dependency - assert_not_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' -} +# FIXME: 
Uncomment. +# @test "trying to break dependency compiling: deploy" { +# dfx_start + +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# touch dependent.mo +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_not_contains '"moc-wrapper" "dependency.mo"' + +# touch dependency.mo +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# touch dependency.mo +# assert_command dfx deploy -vv dependency +# assert_not_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_not_contains '"moc-wrapper" "dependency.mo"' + +# touch lib.mo +# assert_command dfx deploy -vv dependent +# assert_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' + +# touch lib.mo +# assert_command dfx deploy -vv dependency +# assert_not_contains '"moc-wrapper" "dependent.mo"' +# assert_contains '"moc-wrapper" "dependency.mo"' +# } @test "trying to break dependency compiling: build" { dfx_start diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 86efb95335..cb31943e27 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -871,9 +871,7 @@ impl CanisterPool { .map(|canister| canister.clone()) .collect() }; - println!("ORDX.len(): {}", build_config.user_specified_canisters.as_ref().unwrap().len()); // FIXME: Remove. let order = self.build_order(env, &toplevel_canisters.clone())?; // TODO: Eliminate `clone`. - println!("ORD.len(): {}", order.len()); // FIXME: Remove. 
self.step_prebuild_all(log, build_config, toplevel_canisters.as_slice()) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; @@ -893,7 +891,6 @@ impl CanisterPool { env.get_cache().as_ref(), env.get_logger(), )? { - println!("AAA: {}", canister.info.get_name()); // FIXME: Remove. result.push( self.step_prebuild(build_config, canister) .map_err(|e| { @@ -925,7 +922,6 @@ impl CanisterPool { }), ); } - println!("YYY: {}", canister.info.get_name()); // FIXME: Remove. } } From be650ace27c4436f5d548e31632c8ffa810bce2d Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 23:03:47 +0300 Subject: [PATCH 204/354] bug fix --- src/dfx/src/lib/models/canister.rs | 56 +++++++++++++++++++++++------- 1 file changed, 43 insertions(+), 13 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index cb31943e27..bcabba3f4a 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -22,8 +22,9 @@ use fn_error_context::context; use ic_wasm::metadata::{add_metadata, remove_metadata, Kind}; use ic_wasm::optimize::OptLevel; use itertools::Itertools; +use petgraph::algo::toposort; use petgraph::graph::{DiGraph, NodeIndex}; -use petgraph::visit::{Bfs, Dfs}; +use petgraph::visit::Bfs; use rand::{thread_rng, RngCore}; use slog::{error, info, trace, warn, Logger}; use std::cell::RefCell; @@ -561,8 +562,10 @@ impl CanisterPool { } /// Build only dependencies relevant for `user_specified_canisters`. + /// + /// TODO: Probably shouldn't be `pub`. #[context("Failed to build dependencies graph for canister pool.")] - fn build_canister_dependencies_graph( + pub fn build_canister_dependencies_graph( &self, toplevel_canisters: &Vec>, cache: &dyn Cache, @@ -712,14 +715,18 @@ impl CanisterPool { ) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. // Copy .did files into this temporary directory. + println!("XXX step_prebuild_all"); // FIXME: Remove. 
for canister in self.canister_dependencies(toplevel_canisters) { + println!("CANISTER: {}", canister.get_name()); // FIXME: Remove. let maybe_from = if let Some(remote_candid) = canister.info.get_remote_candid() { Some(remote_candid) } else { canister.info.get_output_idl_path() }; + // TODO: It tries to copy non-existing files (not yet compiled canisters..) if let Some(from) = maybe_from.as_ref() { if from.exists() { + println!("from.exists"); // FIXME: Remove. let to = build_config.idl_root.join(format!( "{}.did", canister.info.get_canister_id()?.to_text() @@ -733,6 +740,7 @@ impl CanisterPool { ); dfx_core::fs::composite::ensure_parent_dir_exists(&to)?; dfx_core::fs::copy(from, &to)?; + println!("COPYTO: {}", to.to_str().unwrap()); // FIXME: Remove. dfx_core::fs::set_permissions_readwrite(&to)?; } else { warn!( @@ -826,20 +834,41 @@ impl CanisterPool { let toplevel_nodes = toplevel_canisters.iter().map( |canister| nodes.get(&canister.canister_id()).unwrap().clone()); - // Make topological order of our nodes: - let mut nodes2 = Vec::new(); - let mut visited = HashMap::new(); - for node in toplevel_nodes { - let mut dfs = Dfs::new(&graph, node); - while let Some(subnode) = dfs.next(&graph) { // FIXME - if !visited.contains_key(&node) { - nodes2.push(subnode); - visited.insert(subnode, ()); - } + let mut reachable_nodes = HashMap::new(); + + for start_node in toplevel_nodes { + let mut bfs = Bfs::new(&graph, start_node); // or `Dfs`, does not matter + while let Some(node) = bfs.next(&graph) { + reachable_nodes.insert(node, ()); } } - Ok(nodes2 + let subgraph = graph + .filter_map( + |node, _| if reachable_nodes.contains_key(&node) { + Some(node) + } else { + None + }, + |edge, _| Some(edge)); + + // TODO: better error message + let nodes = toposort(&subgraph, None).map_err(|_e| anyhow!("Cycle in node dependencies")) ?; + + // Make topological order of our nodes: + // let mut nodes2 = Vec::new(); + // let mut visited = HashMap::new(); + // for node in toplevel_nodes 
{ + // let mut dfs = Dfs::new(&graph, node); + // while let Some(subnode) = dfs.next(&graph) { + // if !visited.contains_key(&node) { + // nodes2.push(subnode); + // visited.insert(subnode, ()); + // } + // } + // } + + Ok(nodes .iter() .rev() // Reverse the order, as we have a dependency graph, we want to reverse indices. .map(|idx| *graph.node_weight(*idx).unwrap()) @@ -872,6 +901,7 @@ impl CanisterPool { .collect() }; let order = self.build_order(env, &toplevel_canisters.clone())?; // TODO: Eliminate `clone`. + println!("PPP: order.len: {}", order.len()); self.step_prebuild_all(log, build_config, toplevel_canisters.as_slice()) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; From 4c26aa16769dd7cb1cbaab76737dc305de462759 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 23:03:53 +0300 Subject: [PATCH 205/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 41af71f831..b1d0d50d6d 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -188,6 +188,7 @@ impl CanisterBuilder for MotokoBuilder { let id_map = pool .get_canister_list() .iter() + .filter(|&c| canister_info.get_dependencies().iter().map(|s| s.as_str()).find(|&name| name == c.get_name()).is_some()) // TODO: 1. Slow. 2. Use Motoko dependencies where appropriate. 
.map(|c| (c.get_name().to_string(), c.canister_id().to_text())) .collect(); From 17bd807dbf753ef42143449304d98346165565b5 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 23:04:44 +0300 Subject: [PATCH 206/354] comment --- src/dfx/src/lib/models/canister.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index bcabba3f4a..c0bdabe286 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -834,6 +834,8 @@ impl CanisterPool { let toplevel_nodes = toplevel_canisters.iter().map( |canister| nodes.get(&canister.canister_id()).unwrap().clone()); + // TODO: The following isn't very efficient. + let mut reachable_nodes = HashMap::new(); for start_node in toplevel_nodes { From 073054ac16f475f81572100394039730d0fd7709 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 23:08:34 +0300 Subject: [PATCH 207/354] make_like.bash tests passed --- e2e/tests-dfx/make_like.bash | 73 ++++++++++++++++++------------------ 1 file changed, 36 insertions(+), 37 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 92b08cd75d..7ebf4b91b8 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -15,43 +15,42 @@ teardown() { standard_teardown } -# FIXME: Uncomment. 
-# @test "trying to break dependency compiling: deploy" { -# dfx_start - -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# touch dependent.mo -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_not_contains '"moc-wrapper" "dependency.mo"' - -# touch dependency.mo -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# touch dependency.mo -# assert_command dfx deploy -vv dependency -# assert_not_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_not_contains '"moc-wrapper" "dependency.mo"' - -# touch lib.mo -# assert_command dfx deploy -vv dependent -# assert_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' - -# touch lib.mo -# assert_command dfx deploy -vv dependency -# assert_not_contains '"moc-wrapper" "dependent.mo"' -# assert_contains '"moc-wrapper" "dependency.mo"' -# } +@test "trying to break dependency compiling: deploy" { + dfx_start + + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + touch dependent.mo + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' + + touch dependency.mo + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + touch dependency.mo + assert_command dfx deploy -vv dependency + assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + assert_command dfx deploy -vv dependent + assert_contains 
'"moc-wrapper" "dependent.mo"' + assert_not_contains '"moc-wrapper" "dependency.mo"' + + touch lib.mo + assert_command dfx deploy -vv dependent + assert_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' + + touch lib.mo + assert_command dfx deploy -vv dependency + assert_not_contains '"moc-wrapper" "dependent.mo"' + assert_contains '"moc-wrapper" "dependency.mo"' +} @test "trying to break dependency compiling: build" { dfx_start From 6d067592778d9a405f9b0e04839cc517714fbcb5 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 23:10:42 +0300 Subject: [PATCH 208/354] tracing removed --- src/dfx/src/lib/models/canister.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index c0bdabe286..1171b61de6 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -715,9 +715,7 @@ impl CanisterPool { ) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. // Copy .did files into this temporary directory. - println!("XXX step_prebuild_all"); // FIXME: Remove. for canister in self.canister_dependencies(toplevel_canisters) { - println!("CANISTER: {}", canister.get_name()); // FIXME: Remove. let maybe_from = if let Some(remote_candid) = canister.info.get_remote_candid() { Some(remote_candid) } else { @@ -726,7 +724,6 @@ impl CanisterPool { // TODO: It tries to copy non-existing files (not yet compiled canisters..) if let Some(from) = maybe_from.as_ref() { if from.exists() { - println!("from.exists"); // FIXME: Remove. let to = build_config.idl_root.join(format!( "{}.did", canister.info.get_canister_id()?.to_text() @@ -740,7 +737,6 @@ impl CanisterPool { ); dfx_core::fs::composite::ensure_parent_dir_exists(&to)?; dfx_core::fs::copy(from, &to)?; - println!("COPYTO: {}", to.to_str().unwrap()); // FIXME: Remove. 
dfx_core::fs::set_permissions_readwrite(&to)?; } else { warn!( @@ -903,7 +899,6 @@ impl CanisterPool { .collect() }; let order = self.build_order(env, &toplevel_canisters.clone())?; // TODO: Eliminate `clone`. - println!("PPP: order.len: {}", order.len()); self.step_prebuild_all(log, build_config, toplevel_canisters.as_slice()) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; From 662dbe9554f0084338966833cfef613139658f15 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Thu, 25 Apr 2024 23:29:27 +0300 Subject: [PATCH 209/354] debugging --- e2e/assets/make_like/README.txt | 1 + e2e/assets/make_like/dfx.json | 4 ++-- e2e/assets/make_like/{ => src}/dependency.mo | 0 e2e/assets/make_like/{ => src}/dependent.mo | 0 src/dfx/src/lib/builders/mod.rs | 4 ---- src/dfx/src/lib/models/canister.rs | 1 + 6 files changed, 4 insertions(+), 6 deletions(-) create mode 100644 e2e/assets/make_like/README.txt rename e2e/assets/make_like/{ => src}/dependency.mo (100%) rename e2e/assets/make_like/{ => src}/dependent.mo (100%) diff --git a/e2e/assets/make_like/README.txt b/e2e/assets/make_like/README.txt new file mode 100644 index 0000000000..014d9afb98 --- /dev/null +++ b/e2e/assets/make_like/README.txt @@ -0,0 +1 @@ +Sources are put into src/ to check how it behaves with subdirectories. 
\ No newline at end of file diff --git a/e2e/assets/make_like/dfx.json b/e2e/assets/make_like/dfx.json index 31c3b730b3..564f3de8c2 100644 --- a/e2e/assets/make_like/dfx.json +++ b/e2e/assets/make_like/dfx.json @@ -2,10 +2,10 @@ "version": 1, "canisters": { "dependency": { - "main": "dependency.mo" + "main": "src/dependency.mo" }, "dependent": { - "main": "dependent.mo", + "main": "src/dependent.mo", "dependencies": [ "dependency" ] diff --git a/e2e/assets/make_like/dependency.mo b/e2e/assets/make_like/src/dependency.mo similarity index 100% rename from e2e/assets/make_like/dependency.mo rename to e2e/assets/make_like/src/dependency.mo diff --git a/e2e/assets/make_like/dependent.mo b/e2e/assets/make_like/src/dependent.mo similarity index 100% rename from e2e/assets/make_like/dependent.mo rename to e2e/assets/make_like/src/dependent.mo diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 8b61c2b00a..5f9fdca56a 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -348,10 +348,6 @@ pub trait CanisterBuilder { if let Some(imported_file) = imported_file { let imported_file_metadata = metadata(&imported_file)?; // FIXME: Need to check the full path. let imported_file_time = imported_file_metadata.modified()?; - println!( - "XXX: {} {:?} <= {}", // FIXME: Remove. - imported_file_time > wasm_file_time, subnode, output_wasm_path.to_str().unwrap(), - ); if imported_file_time > wasm_file_time { break; }; diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 1171b61de6..a6f4d046a5 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -827,6 +827,7 @@ impl CanisterPool { let (graph, nodes) = self.build_canister_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? + // TODO: If source files are unreadable, this panics. 
let toplevel_nodes = toplevel_canisters.iter().map( |canister| nodes.get(&canister.canister_id()).unwrap().clone()); From 358881f076d2d4f4ded00f180dcb87f0cb6b25a7 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 01:36:22 +0300 Subject: [PATCH 210/354] bug fixing (does not compile) --- src/dfx/src/lib/builders/mod.rs | 24 ++++++++++++------------ src/dfx/src/lib/builders/motoko.rs | 18 +++++++++++++++++- 2 files changed, 29 insertions(+), 13 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 5f9fdca56a..c3b8852ecb 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -331,18 +331,18 @@ pub trait CanisterBuilder { continue; } Import::Relative(path) => { - Some(Path::new(path).to_owned()) - // FIXME: Need to check the full path. - // Some(if path.exists() { - // Path::new(path).to_owned() - // } else { - // let path2 = path.join(Path::new("lib.mo")); - // if path2.exists() { - // path2.to_owned() - // } else { - // bail!("source file has been deleted"); - // } - // }) + // duplicate code + let full_path = if path.is_absolute() { // can this be? 
+ *path + } else { + base_path.join(path) + }; + let path2 = full_path.join(Path::new("lib.mo")); + Some(if path2.exists() { + path2 + } else { + path + }) } }; if let Some(imported_file) = imported_file { diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index b1d0d50d6d..7ec4067db2 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -56,6 +56,7 @@ pub fn add_imports( imports: &mut ImportsTracker, pool: &CanisterPool, top: Option<&CanisterInfo>, // hackish + base_path: Path, ) -> DfxResult { let parent = if let Some(top) = top { Import::Canister(top.get_name().to_string()) // a little inefficient @@ -82,7 +83,19 @@ pub fn add_imports( let child = Import::try_from(line).context("Failed to create MotokoImport.")?; match &child { Import::Relative(path) => { - add_imports_recursive(cache, path.as_path(), imports, pool, None)?; + let full_child_path = if path.is_absolute() { // can this be? + *path + } else { + base_path.join(path) + }; + // duplicate code + let path2 = path.join(Path::new("lib.mo")); + let child_base_path = if path2.exists() { + path + } else { + path.parent().unwrap() // FIXME: `unwrap()` + }; + add_imports_recursive(cache, path.as_path(), imports, pool, None, child_base_path)?; } Import::Canister(canister_name) => { // duplicate code @@ -91,12 +104,14 @@ pub fn add_imports( { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { + let child_base_path = main_file.parent().unwrap(); // FIXME: `unwrap()` add_imports_recursive( cache, Path::new(main_file), imports, pool, Some(canister.get_info()), + child_base_path, )?; } } @@ -125,6 +140,7 @@ pub fn add_imports( imports, pool, Some(info), + Path::new("."), )?; Ok(()) From b4ecb96a1725088c17900f74d987564bc11550ca Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 01:50:15 +0300 Subject: [PATCH 211/354] bug fix (untested) --- src/dfx/src/lib/builders/mod.rs | 8 +++++--- 
src/dfx/src/lib/builders/motoko.rs | 14 +++++++------- src/dfx/src/lib/models/canister.rs | 8 +++++++- 3 files changed, 19 insertions(+), 11 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index c3b8852ecb..8a33a3e58f 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -36,6 +36,8 @@ pub use custom::custom_download; use self::motoko::add_imports; +use super::models::canister::RelativePath; + #[derive(Debug)] pub enum WasmBuildOutput { // Wasm(Vec), @@ -330,10 +332,10 @@ pub trait CanisterBuilder { // Skip libs, all changes by package managers don't modify existing directories but create new ones. continue; } - Import::Relative(path) => { + Import::Relative(RelativePath { path, base_path }) => { // duplicate code let full_path = if path.is_absolute() { // can this be? - *path + path.clone() } else { base_path.join(path) }; @@ -341,7 +343,7 @@ pub trait CanisterBuilder { Some(if path2.exists() { path2 } else { - path + path.clone() }) } }; diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 7ec4067db2..916f333de3 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -6,7 +6,7 @@ use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; -use crate::lib::models::canister::{CanisterPool, Import, ImportsTracker}; +use crate::lib::models::canister::{CanisterPool, Import, ImportsTracker, RelativePath}; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; use anyhow::Context; @@ -56,12 +56,12 @@ pub fn add_imports( imports: &mut ImportsTracker, pool: &CanisterPool, top: Option<&CanisterInfo>, // hackish - base_path: Path, + base_path: &Path, ) -> DfxResult { let parent = if let Some(top) = top { 
Import::Canister(top.get_name().to_string()) // a little inefficient } else { - Import::Relative(file.to_path_buf()) + Import::Relative(RelativePath { path: file.to_path_buf(), base_path: base_path.to_path_buf() }) }; if imports.nodes.get(&parent).is_some() { // The item is already in the graph. @@ -82,14 +82,14 @@ pub fn add_imports( for line in output.lines() { let child = Import::try_from(line).context("Failed to create MotokoImport.")?; match &child { - Import::Relative(path) => { + Import::Relative(RelativePath { path, base_path }) => { let full_child_path = if path.is_absolute() { // can this be? - *path + path.clone() } else { base_path.join(path) }; // duplicate code - let path2 = path.join(Path::new("lib.mo")); + let path2 = full_child_path.join(Path::new("lib.mo")); let child_base_path = if path2.exists() { path } else { @@ -409,7 +409,7 @@ impl TryFrom<&str> for Import { path.display() )))); }; - Import::Relative(path) + Import::Relative(RelativePath { path, base_path: PathBuf::from("") }) // TODO: `""` is a hack. } None => { return Err(DfxError::new(BuildError::DependencyError(format!( diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index a6f4d046a5..cf80453ea5 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -440,6 +440,12 @@ fn check_valid_subtype(compiled_idl_path: &Path, specified_idl_path: &Path) -> D Ok(()) } +#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)] +pub struct RelativePath { + pub path: PathBuf, + pub base_path: PathBuf, +} + /// Used mainly for Motoko /// /// TODO: Copying this type uses `String.clone()` what may be inefficient. 
@@ -448,7 +454,7 @@ pub enum Import { Canister(String), Ic(String), Lib(String), // TODO: Unused, because package manager never update existing files (but create new dirs) - Relative(PathBuf), + Relative(RelativePath), } /// The graph of imports (used mainly for Motoko) From 50ce05f9701a47fb7f1098dd1b4faf96265ca3b6 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 02:55:17 +0300 Subject: [PATCH 212/354] rewriting to fix a bug --- src/dfx/src/lib/builders/mod.rs | 11 ++-------- src/dfx/src/lib/builders/motoko.rs | 35 ++++++++++++++---------------- src/dfx/src/lib/models/canister.rs | 8 +------ 3 files changed, 19 insertions(+), 35 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 8a33a3e58f..04d4483c49 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -36,8 +36,6 @@ pub use custom::custom_download; use self::motoko::add_imports; -use super::models::canister::RelativePath; - #[derive(Debug)] pub enum WasmBuildOutput { // Wasm(Vec), @@ -332,18 +330,13 @@ pub trait CanisterBuilder { // Skip libs, all changes by package managers don't modify existing directories but create new ones. continue; } - Import::Relative(RelativePath { path, base_path }) => { + Import::FullPath(full_path) => { // duplicate code - let full_path = if path.is_absolute() { // can this be? 
- path.clone() - } else { - base_path.join(path) - }; let path2 = full_path.join(Path::new("lib.mo")); Some(if path2.exists() { path2 } else { - path.clone() + full_path.clone() }) } }; diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 916f333de3..836154b29c 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -6,7 +6,7 @@ use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; -use crate::lib::models::canister::{CanisterPool, Import, ImportsTracker, RelativePath}; +use crate::lib::models::canister::{CanisterPool, Import, ImportsTracker}; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; use anyhow::Context; @@ -56,12 +56,13 @@ pub fn add_imports( imports: &mut ImportsTracker, pool: &CanisterPool, top: Option<&CanisterInfo>, // hackish - base_path: &Path, ) -> DfxResult { + println!("MMM: {}", file.to_str().unwrap()); // FIXME: Remove. + let base_path = file.parent().unwrap(); // FIXME: `unwrap()` let parent = if let Some(top) = top { Import::Canister(top.get_name().to_string()) // a little inefficient } else { - Import::Relative(RelativePath { path: file.to_path_buf(), base_path: base_path.to_path_buf() }) + Import::FullPath(base_path.join(file)) }; if imports.nodes.get(&parent).is_some() { // The item is already in the graph. @@ -78,24 +79,21 @@ pub fn add_imports( .output() .with_context(|| format!("Error executing {:#?}", command))?; let output = String::from_utf8_lossy(&output.stdout); + println!("XXX: {}", output.to_string()); // FIXME: Remove. 
for line in output.lines() { let child = Import::try_from(line).context("Failed to create MotokoImport.")?; match &child { - Import::Relative(RelativePath { path, base_path }) => { - let full_child_path = if path.is_absolute() { // can this be? - path.clone() - } else { - base_path.join(path) - }; + Import::FullPath(full_child_path) => { + println!("RRR: {}", full_child_path.to_str().unwrap()); // FIXME: Remove. // duplicate code let path2 = full_child_path.join(Path::new("lib.mo")); - let child_base_path = if path2.exists() { - path + let child_path = if path2.exists() { + &path2 } else { - path.parent().unwrap() // FIXME: `unwrap()` + full_child_path }; - add_imports_recursive(cache, path.as_path(), imports, pool, None, child_base_path)?; + add_imports_recursive(cache, child_path.as_path(), imports, pool, None)?; } Import::Canister(canister_name) => { // duplicate code @@ -104,14 +102,12 @@ pub fn add_imports( { let main_file = canister.get_info().get_main_file(); if let Some(main_file) = main_file { - let child_base_path = main_file.parent().unwrap(); // FIXME: `unwrap()` add_imports_recursive( cache, Path::new(main_file), imports, pool, Some(canister.get_info()), - child_base_path, )?; } } @@ -136,11 +132,10 @@ pub fn add_imports( add_imports_recursive( cache, - motoko_info.get_main_path(), + motoko_info.get_main_path().canonicalize()?.as_path(), imports, pool, Some(info), - Path::new("."), )?; Ok(()) @@ -379,6 +374,7 @@ impl TryFrom<&str> for Import { } None => (line, None), }; + println!("PPP: {:?} / {:?}", url, fullpath); // FIXME: Remove. let import = match url.find(':') { Some(index) => { if index >= line.len() - 1 { @@ -402,14 +398,15 @@ impl TryFrom<&str> for Import { } None => match fullpath { Some(fullpath) => { + println!("ZZZ: {}", fullpath); // FIXME: Remove. let path = PathBuf::from(fullpath); - if !path.is_file() { + if !path.is_file() { // FIXME: What's about `/lib.mo` paths? 
return Err(DfxError::new(BuildError::DependencyError(format!( "Cannot find import file {}", path.display() )))); }; - Import::Relative(RelativePath { path, base_path: PathBuf::from("") }) // TODO: `""` is a hack. + Import::FullPath(path) // TODO: `""` is a hack. } None => { return Err(DfxError::new(BuildError::DependencyError(format!( diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index cf80453ea5..3d3fa375c6 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -440,12 +440,6 @@ fn check_valid_subtype(compiled_idl_path: &Path, specified_idl_path: &Path) -> D Ok(()) } -#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)] -pub struct RelativePath { - pub path: PathBuf, - pub base_path: PathBuf, -} - /// Used mainly for Motoko /// /// TODO: Copying this type uses `String.clone()` what may be inefficient. @@ -454,7 +448,7 @@ pub enum Import { Canister(String), Ic(String), Lib(String), // TODO: Unused, because package manager never update existing files (but create new dirs) - Relative(RelativePath), + FullPath(PathBuf), } /// The graph of imports (used mainly for Motoko) From f8ca306390e146e52b3bbce2265ba7663e66f8aa Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 02:55:52 +0300 Subject: [PATCH 213/354] bug in a e2e test fixed --- e2e/assets/make_like/{ => src}/lib.mo | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename e2e/assets/make_like/{ => src}/lib.mo (100%) diff --git a/e2e/assets/make_like/lib.mo b/e2e/assets/make_like/src/lib.mo similarity index 100% rename from e2e/assets/make_like/lib.mo rename to e2e/assets/make_like/src/lib.mo From 7a187babf6374e1279d34a863e397183064f1ad1 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 02:57:13 +0300 Subject: [PATCH 214/354] bug in a e2e test fixed --- e2e/tests-dfx/make_like.bash | 56 ++++++++++++++++++------------------ 1 file changed, 28 insertions(+), 28 deletions(-) diff --git 
a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 7ebf4b91b8..05091dfd79 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -19,37 +19,37 @@ teardown() { dfx_start assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' touch dependent.mo assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' touch dependency.mo assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' touch dependency.mo assert_command dfx deploy -vv dependency - assert_not_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' touch lib.mo assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' touch lib.mo assert_command dfx deploy -vv dependency - assert_not_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' + 
assert_contains '"moc-wrapper" "src/dependency.mo"' } @test "trying to break dependency compiling: build" { @@ -58,35 +58,35 @@ teardown() { assert_command dfx canister create dependency assert_command dfx canister create dependent assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' touch dependent.mo assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' touch dependency.mo assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' touch dependency.mo assert_command dfx build -vv dependency - assert_not_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_not_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' touch lib.mo assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' touch lib.mo assert_command dfx build -vv dependency - assert_not_contains '"moc-wrapper" "dependent.mo"' - assert_contains '"moc-wrapper" "dependency.mo"' + assert_not_contains '"moc-wrapper" 
"src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' } From c855cf4ccc3270cff16856d4379f57fed308611f Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 03:18:51 +0300 Subject: [PATCH 215/354] bug fix in tests --- e2e/tests-dfx/make_like.bash | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 05091dfd79..f6c5979fe7 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -22,17 +22,17 @@ teardown() { assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' - touch dependent.mo + touch src/dependent.mo assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' - touch dependency.mo + touch src/dependency.mo assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' - touch dependency.mo + touch src/dependency.mo assert_command dfx deploy -vv dependency assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' @@ -41,12 +41,12 @@ teardown() { assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' - touch lib.mo + touch src/lib.mo assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' - touch lib.mo + touch src/lib.mo assert_command dfx deploy -vv dependency assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' @@ -61,17 +61,17 @@ teardown() { assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' - touch dependent.mo + touch src/dependent.mo assert_command dfx build -vv dependent assert_contains 
'"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' - touch dependency.mo + touch src/dependency.mo assert_command dfx build -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' - touch dependency.mo + touch src/dependency.mo assert_command dfx build -vv dependency assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' @@ -80,12 +80,12 @@ teardown() { assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' - touch lib.mo + touch src/lib.mo assert_command dfx build -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' - touch lib.mo + touch src/lib.mo assert_command dfx build -vv dependency assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' From 7c5e027af5bd3b2e98a5c9aadb3a0c7ef72f2708 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 03:22:06 +0300 Subject: [PATCH 216/354] tests passed --- src/dfx/src/lib/builders/mod.rs | 9 +-------- src/dfx/src/lib/builders/motoko.rs | 5 ----- 2 files changed, 1 insertion(+), 13 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 04d4483c49..dd14bcdd3c 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -288,18 +288,11 @@ pub trait CanisterBuilder { panic!("programming error"); }; let mut import_iter = Bfs::new(&imports.graph, start); - // let mut top_level = true; // the first canister is our own canister and therefore is a dependency. 
loop { - // let top_level1 = top_level; - // top_level = false; if let Some(import) = import_iter.next(&imports.graph) { let subnode = &imports.graph[import]; let imported_file = match subnode { Import::Canister(canister_name) => { - // if !top_level1 { - // continue; - // } - // duplicate code if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { @@ -341,7 +334,7 @@ pub trait CanisterBuilder { } }; if let Some(imported_file) = imported_file { - let imported_file_metadata = metadata(&imported_file)?; // FIXME: Need to check the full path. + let imported_file_metadata = metadata(&imported_file)?; let imported_file_time = imported_file_metadata.modified()?; if imported_file_time > wasm_file_time { break; diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 836154b29c..8e80201294 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -57,7 +57,6 @@ pub fn add_imports( pool: &CanisterPool, top: Option<&CanisterInfo>, // hackish ) -> DfxResult { - println!("MMM: {}", file.to_str().unwrap()); // FIXME: Remove. let base_path = file.parent().unwrap(); // FIXME: `unwrap()` let parent = if let Some(top) = top { Import::Canister(top.get_name().to_string()) // a little inefficient @@ -79,13 +78,11 @@ pub fn add_imports( .output() .with_context(|| format!("Error executing {:#?}", command))?; let output = String::from_utf8_lossy(&output.stdout); - println!("XXX: {}", output.to_string()); // FIXME: Remove. for line in output.lines() { let child = Import::try_from(line).context("Failed to create MotokoImport.")?; match &child { Import::FullPath(full_child_path) => { - println!("RRR: {}", full_child_path.to_str().unwrap()); // FIXME: Remove. 
// duplicate code let path2 = full_child_path.join(Path::new("lib.mo")); let child_path = if path2.exists() { @@ -374,7 +371,6 @@ impl TryFrom<&str> for Import { } None => (line, None), }; - println!("PPP: {:?} / {:?}", url, fullpath); // FIXME: Remove. let import = match url.find(':') { Some(index) => { if index >= line.len() - 1 { @@ -398,7 +394,6 @@ impl TryFrom<&str> for Import { } None => match fullpath { Some(fullpath) => { - println!("ZZZ: {}", fullpath); // FIXME: Remove. let path = PathBuf::from(fullpath); if !path.is_file() { // FIXME: What's about `/lib.mo` paths? return Err(DfxError::new(BuildError::DependencyError(format!( From b7f2109bc93887aabcc0c568235b006e69982f8a Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 03:39:19 +0300 Subject: [PATCH 217/354] more tests --- e2e/tests-dfx/make_like.bash | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index f6c5979fe7..cc88c4e060 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -50,6 +50,21 @@ teardown() { assert_command dfx deploy -vv dependency assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' + + touch src/lib.mo + assert_command dfx deploy -vv + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' + + touch src/dependency.mo + assert_command dfx deploy -vv + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' + + touch src/dependent.mo + assert_command dfx deploy -vv + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' } @test "trying to break dependency compiling: build" { From da921491d029945d5d666c7637bd0bac07a2e8e5 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 03:56:35 +0300 Subject: [PATCH 218/354] more tests --- 
e2e/tests-dfx/make_like.bash | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index cc88c4e060..0d394e9392 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -21,50 +21,70 @@ teardown() { assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Installing code for canister dependent' + assert_contains 'Installing code for canister dependency' touch src/dependent.mo assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Upgrading code for canister dependent' + assert_not_contains 'Upgrading code for canister dependency' touch src/dependency.mo assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Upgrading code for canister dependent' + assert_contains 'Upgrading code for canister dependency' touch src/dependency.mo assert_command dfx deploy -vv dependency assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_not_contains 'Upgrading code for canister dependent' + assert_contains 'Upgrading code for canister dependency' assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Upgrading code for canister dependent' + assert_not_contains 'Upgrading code for canister dependency' touch src/lib.mo assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Upgrading code for canister dependent' + assert_contains 'Upgrading code for canister dependency' touch 
src/lib.mo assert_command dfx deploy -vv dependency assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_not_contains 'Upgrading code for canister dependent' + assert_contains 'Upgrading code for canister dependency' touch src/lib.mo assert_command dfx deploy -vv assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Upgrading code for canister dependent' + assert_contains 'Upgrading code for canister dependency' touch src/dependency.mo assert_command dfx deploy -vv assert_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Upgrading code for canister dependent' + assert_contains 'Upgrading code for canister dependency' touch src/dependent.mo assert_command dfx deploy -vv assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Upgrading code for canister dependent' + assert_not_contains 'Upgrading code for canister dependency' } @test "trying to break dependency compiling: build" { @@ -104,4 +124,19 @@ teardown() { assert_command dfx build -vv dependency assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' + + touch src/lib.mo + assert_command dfx build -vv + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' + + touch src/dependency.mo + assert_command dfx build -vv + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' + + touch src/dependent.mo + assert_command dfx build -vv + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' } From c499e7da7b9f3a7b790b35e74952556b6d440af9 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 04:17:14 +0300 Subject: [PATCH 219/354] fixing 
wrong deployment --- e2e/tests-dfx/make_like.bash | 16 ++++++++++++++++ .../lib/operations/canister/deploy_canisters.rs | 1 + 2 files changed, 17 insertions(+) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 0d394e9392..570684032c 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -140,3 +140,19 @@ teardown() { assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' } + +@test "mix build and deploy" { + dfx_start + + assert_command dfx canister create dependency + assert_command dfx canister create dependent + assert_command dfx build -vv + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' + + assert_command dfx deploy -vv dependent + assert_not_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Installing code for canister dependent' + assert_contains 'Installing code for canister dependency' +} \ No newline at end of file diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index efc8c25f03..b2c496823f 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -125,6 +125,7 @@ pub async fn deploy_canisters( }) .collect(); + // FIXME let canisters_to_install: &Vec = &order_names .clone() .into_iter() From 3b9b47b73595319d5e75d9d0609b7ab14764bc70 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 05:55:20 +0300 Subject: [PATCH 220/354] fixing a bug --- .../src/lib/operations/canister/deploy_canisters.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index b2c496823f..561426dc17 100644 --- 
a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -136,17 +136,21 @@ pub async fn deploy_canisters( config.get_config().get_canister_config(canister_name).map_or( true, |canister_config| canister_config.deploy)) }) + // .collect(); .map(|canister_name| -> DfxResult> { Ok( if let Some(canister) = canister_pool.get_first_canister_with_name(canister_name.as_str()) { + // FIXME: Double check, whether this OR condition is correct here: if canister.builder.should_build( &canister_pool, &canister.info, env.get_cache().as_ref(), env.get_logger(), - )? { + )? || + toplevel_canisters.iter().map(|cur_canister| cur_canister.get_name().to_string()).contains(&canister.get_name().to_string()) + { Some(canister_name) } else { None @@ -170,7 +174,7 @@ pub async fn deploy_canisters( { register_canisters( env, - &order_names, + &canisters_to_install, &initial_canister_id_store, with_cycles, specified_id_from_cli, @@ -246,7 +250,7 @@ fn canister_with_dependencies( #[context("Failed while trying to register all canisters.")] async fn register_canisters( env: &dyn Environment, - canister_names: &[String], + canister_names: &[String], // TODO: Should pass `&[Arc]` instead. 
canister_id_store: &CanisterIdStore, with_cycles: Option, specified_id_from_cli: Option, From 4868c2d2491b8599f4e353042dd46bbd637c2119 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 06:06:05 +0300 Subject: [PATCH 221/354] bug fix --- e2e/tests-dfx/make_like.bash | 6 ++--- .../operations/canister/deploy_canisters.rs | 27 +------------------ 2 files changed, 4 insertions(+), 29 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 570684032c..5b03bfa672 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -29,7 +29,7 @@ teardown() { assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' - assert_not_contains 'Upgrading code for canister dependency' + assert_contains 'Upgrading code for canister dependency' touch src/dependency.mo assert_command dfx deploy -vv dependent @@ -49,7 +49,7 @@ teardown() { assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' - assert_not_contains 'Upgrading code for canister dependency' + assert_contains 'Upgrading code for canister dependency' touch src/lib.mo assert_command dfx deploy -vv dependent @@ -84,7 +84,7 @@ teardown() { assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' - assert_not_contains 'Upgrading code for canister dependency' + assert_contains 'Upgrading code for canister dependency' } @test "trying to break dependency compiling: build" { diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 561426dc17..a42a351636 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ 
b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -136,32 +136,7 @@ pub async fn deploy_canisters( config.get_config().get_canister_config(canister_name).map_or( true, |canister_config| canister_config.deploy)) }) - // .collect(); - .map(|canister_name| -> DfxResult> { - Ok( - if let Some(canister) = - canister_pool.get_first_canister_with_name(canister_name.as_str()) - { - // FIXME: Double check, whether this OR condition is correct here: - if canister.builder.should_build( - &canister_pool, - &canister.info, - env.get_cache().as_ref(), - env.get_logger(), - )? || - toplevel_canisters.iter().map(|cur_canister| cur_canister.get_name().to_string()).contains(&canister.get_name().to_string()) - { - Some(canister_name) - } else { - None - } - } else { - None - }, - ) - }) - .filter_map(|v| v.transpose()) - .try_collect()?; + .collect(); if some_canister.is_some() { info!(log, "Deploying: {}", canisters_to_install.join(" ")); From 5c535ed3007df08e14a593e482dc0537dbb7db06 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 06:08:58 +0300 Subject: [PATCH 222/354] removed outdated FIXME --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index a42a351636..9b64fbbf99 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -125,7 +125,6 @@ pub async fn deploy_canisters( }) .collect(); - // FIXME let canisters_to_install: &Vec = &order_names .clone() .into_iter() From 3b64f01538c52bf9595584c830d9d80b58924ca6 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 06:14:33 +0300 Subject: [PATCH 223/354] bug fix --- src/dfx/src/lib/builders/motoko.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 
8e80201294..f37f50a3a2 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -9,7 +9,7 @@ use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; use crate::lib::models::canister::{CanisterPool, Import, ImportsTracker}; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; -use anyhow::Context; +use anyhow::{Context, anyhow}; use candid::Principal as CanisterId; use dfx_core::config::cache::Cache; use dfx_core::config::model::dfinity::{MetadataVisibility, Profile}; @@ -57,7 +57,7 @@ pub fn add_imports( pool: &CanisterPool, top: Option<&CanisterInfo>, // hackish ) -> DfxResult { - let base_path = file.parent().unwrap(); // FIXME: `unwrap()` + let base_path = file.parent().ok_or_else(|| anyhow!("Cannot get base directory"))?; let parent = if let Some(top) = top { Import::Canister(top.get_name().to_string()) // a little inefficient } else { From 12b5d0c69b5f3d704086c36f4b97aa31c2419710 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 06:31:50 +0300 Subject: [PATCH 224/354] removed an outdated TODO --- src/dfx/src/lib/builders/motoko.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index f37f50a3a2..75d8054d26 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -401,7 +401,7 @@ impl TryFrom<&str> for Import { path.display() )))); }; - Import::FullPath(path) // TODO: `""` is a hack. 
+ Import::FullPath(path) } None => { return Err(DfxError::new(BuildError::DependencyError(format!( From 973df62bfb27ad10f99435e98735258a1b910041 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 06:41:40 +0300 Subject: [PATCH 225/354] eliminate a superfluous condition --- src/dfx/src/lib/builders/mod.rs | 8 +------- src/dfx/src/lib/builders/motoko.rs | 9 +-------- 2 files changed, 2 insertions(+), 15 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index dd14bcdd3c..1a99665704 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -324,13 +324,7 @@ pub trait CanisterBuilder { continue; } Import::FullPath(full_path) => { - // duplicate code - let path2 = full_path.join(Path::new("lib.mo")); - Some(if path2.exists() { - path2 - } else { - full_path.clone() - }) + Some(full_path.clone()) // TODO: Eliminate `clone`. } }; if let Some(imported_file) = imported_file { diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 75d8054d26..d12ebc3c27 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -83,14 +83,7 @@ pub fn add_imports( let child = Import::try_from(line).context("Failed to create MotokoImport.")?; match &child { Import::FullPath(full_child_path) => { - // duplicate code - let path2 = full_child_path.join(Path::new("lib.mo")); - let child_path = if path2.exists() { - &path2 - } else { - full_child_path - }; - add_imports_recursive(cache, child_path.as_path(), imports, pool, None)?; + add_imports_recursive(cache, full_child_path.as_path(), imports, pool, None)?; } Import::Canister(canister_name) => { // duplicate code From 7ab36ecebb3383977a77e063ae8ba8559fbe17ee Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 07:09:46 +0300 Subject: [PATCH 226/354] bug fix --- src/dfx/src/lib/models/canister.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git 
a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 3d3fa375c6..dc170e1719 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -828,14 +828,18 @@ impl CanisterPool { self.build_canister_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? // TODO: If source files are unreadable, this panics. - let toplevel_nodes = toplevel_canisters.iter().map( - |canister| nodes.get(&canister.canister_id()).unwrap().clone()); + let toplevel_nodes: Vec = toplevel_canisters.iter().map( + |canister| -> DfxResult { + // FIXME + Ok(nodes.get(&canister.canister_id()).ok_or_else(|| anyhow!("No such canister {}.", canister.get_name()))?.clone()) + }) + .try_collect()?; // TODO: The following isn't very efficient. let mut reachable_nodes = HashMap::new(); - for start_node in toplevel_nodes { + for &start_node in toplevel_nodes.iter() { let mut bfs = Bfs::new(&graph, start_node); // or `Dfs`, does not matter while let Some(node) = bfs.next(&graph) { reachable_nodes.insert(node, ()); From f150763180aa367e6c443475c54418dbcd3fe868 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 07:25:08 +0300 Subject: [PATCH 227/354] bug fix --- src/dfx/src/lib/models/canister.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index dc170e1719..f27df8e089 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -618,12 +618,12 @@ impl CanisterPool { } }; let parent_canister = self - .get_first_canister_with_name(parent_name) - .unwrap() - .canister_id(); - dest_id_to_source_id + .get_first_canister_with_name(parent_name).unwrap(); + let parent_canister_id = parent_canister.canister_id(); + let parent_dest_id = *dest_id_to_source_id .entry(start_node) - .or_insert_with(|| dest_graph.add_node(parent_canister)); + .or_insert_with(|| 
dest_graph.add_node(parent_canister_id)); + dest_nodes.insert(parent_canister_id, parent_dest_id); let bfs = Bfs::new(&source_graph, start_node); let mut filtered_bfs = BfsFiltered::new(bfs); @@ -827,10 +827,8 @@ impl CanisterPool { let (graph, nodes) = self.build_canister_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? - // TODO: If source files are unreadable, this panics. let toplevel_nodes: Vec = toplevel_canisters.iter().map( |canister| -> DfxResult { - // FIXME Ok(nodes.get(&canister.canister_id()).ok_or_else(|| anyhow!("No such canister {}.", canister.get_name()))?.clone()) }) .try_collect()?; From dfc8aef64c3b3b56815e1dd3863236488169c9f5 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 07:31:34 +0300 Subject: [PATCH 228/354] removed a wrong FIXME --- src/dfx/src/lib/builders/motoko.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index d12ebc3c27..7df95dc233 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -388,7 +388,7 @@ impl TryFrom<&str> for Import { None => match fullpath { Some(fullpath) => { let path = PathBuf::from(fullpath); - if !path.is_file() { // FIXME: What's about `/lib.mo` paths? 
+ if !path.is_file() { return Err(DfxError::new(BuildError::DependencyError(format!( "Cannot find import file {}", path.display() From c0db95bd88d3967dc34c84c98dae153678c835ee Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 07:48:50 +0300 Subject: [PATCH 229/354] cargo fmt --- src/dfx/src/commands/build.rs | 4 +- src/dfx/src/lib/builders/motoko.rs | 15 ++++- src/dfx/src/lib/models/canister.rs | 57 ++++++++++--------- .../operations/canister/deploy_canisters.rs | 20 +++++-- 4 files changed, 61 insertions(+), 35 deletions(-) diff --git a/src/dfx/src/commands/build.rs b/src/dfx/src/commands/build.rs index 7c880d55e5..7ed2434b28 100644 --- a/src/dfx/src/commands/build.rs +++ b/src/dfx/src/commands/build.rs @@ -91,7 +91,9 @@ pub fn exec(env1: &dyn Environment, opts: CanisterBuildOpts) -> DfxResult { .with_canisters_to_build(if let Some(canister) = opts.canister_name { vec![canister] // hacky } else { - config.get_config().get_canister_names_with_dependencies(None)? + config + .get_config() + .get_canister_names_with_dependencies(None)? 
// canister_pool.get_canister_list().iter().map(|&canister| canister.get_name().to_owned()) // hacky // .collect() }) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 7df95dc233..9f7d29304e 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -9,7 +9,7 @@ use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE}; use crate::lib::models::canister::{CanisterPool, Import, ImportsTracker}; use crate::lib::package_arguments::{self, PackageArguments}; use crate::util::assets::management_idl; -use anyhow::{Context, anyhow}; +use anyhow::{anyhow, Context}; use candid::Principal as CanisterId; use dfx_core::config::cache::Cache; use dfx_core::config::model::dfinity::{MetadataVisibility, Profile}; @@ -57,7 +57,9 @@ pub fn add_imports( pool: &CanisterPool, top: Option<&CanisterInfo>, // hackish ) -> DfxResult { - let base_path = file.parent().ok_or_else(|| anyhow!("Cannot get base directory"))?; + let base_path = file + .parent() + .ok_or_else(|| anyhow!("Cannot get base directory"))?; let parent = if let Some(top) = top { Import::Canister(top.get_name().to_string()) // a little inefficient } else { @@ -189,7 +191,14 @@ impl CanisterBuilder for MotokoBuilder { let id_map = pool .get_canister_list() .iter() - .filter(|&c| canister_info.get_dependencies().iter().map(|s| s.as_str()).find(|&name| name == c.get_name()).is_some()) // TODO: 1. Slow. 2. Use Motoko dependencies where appropriate. + .filter(|&c| { + canister_info + .get_dependencies() + .iter() + .map(|s| s.as_str()) + .find(|&name| name == c.get_name()) + .is_some() + }) // TODO: 1. Slow. 2. Use Motoko dependencies where appropriate. 
.map(|c| (c.get_name().to_string(), c.canister_id().to_text())) .collect(); diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index f27df8e089..1f73ad726b 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -617,8 +617,7 @@ impl CanisterPool { panic!("programming error"); } }; - let parent_canister = self - .get_first_canister_with_name(parent_name).unwrap(); + let parent_canister = self.get_first_canister_with_name(parent_name).unwrap(); let parent_canister_id = parent_canister.canister_id(); let parent_dest_id = *dest_id_to_source_id .entry(start_node) @@ -827,14 +826,18 @@ impl CanisterPool { let (graph, nodes) = self.build_canister_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? - let toplevel_nodes: Vec = toplevel_canisters.iter().map( - |canister| -> DfxResult { - Ok(nodes.get(&canister.canister_id()).ok_or_else(|| anyhow!("No such canister {}.", canister.get_name()))?.clone()) + let toplevel_nodes: Vec = toplevel_canisters + .iter() + .map(|canister| -> DfxResult { + Ok(nodes + .get(&canister.canister_id()) + .ok_or_else(|| anyhow!("No such canister {}.", canister.get_name()))? + .clone()) }) .try_collect()?; // TODO: The following isn't very efficient. 
- + let mut reachable_nodes = HashMap::new(); for &start_node in toplevel_nodes.iter() { @@ -844,17 +847,20 @@ impl CanisterPool { } } - let subgraph = graph - .filter_map( - |node, _| if reachable_nodes.contains_key(&node) { + let subgraph = graph.filter_map( + |node, _| { + if reachable_nodes.contains_key(&node) { Some(node) } else { None - }, - |edge, _| Some(edge)); + } + }, + |edge, _| Some(edge), + ); // TODO: better error message - let nodes = toposort(&subgraph, None).map_err(|_e| anyhow!("Cycle in node dependencies")) ?; + let nodes = + toposort(&subgraph, None).map_err(|_e| anyhow!("Cycle in node dependencies"))?; // Make topological order of our nodes: // let mut nodes2 = Vec::new(); @@ -887,20 +893,19 @@ impl CanisterPool { build_config: &BuildConfig, ) -> DfxResult>> { // TODO: The next statement is slow and confusing code. - let toplevel_canisters: Vec> = if let Some(canisters) = build_config.user_specified_canisters.clone() { - self - .canisters - .iter() - .filter(|c| canisters.contains(&c.get_name().to_string())) - .map(|canister| canister.clone()) - .collect() - } else { - self - .canisters - .iter() - .map(|canister| canister.clone()) - .collect() - }; + let toplevel_canisters: Vec> = + if let Some(canisters) = build_config.user_specified_canisters.clone() { + self.canisters + .iter() + .filter(|c| canisters.contains(&c.get_name().to_string())) + .map(|canister| canister.clone()) + .collect() + } else { + self.canisters + .iter() + .map(|canister| canister.clone()) + .collect() + }; let order = self.build_order(env, &toplevel_canisters.clone())?; // TODO: Eliminate `clone`. 
self.step_prebuild_all(log, build_config, toplevel_canisters.as_slice()) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 9b64fbbf99..15cc5560e5 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -104,11 +104,17 @@ pub async fn deploy_canisters( }) .collect(), }; - let toplevel_canisters = toplevel_canisters.into_iter() + let toplevel_canisters = toplevel_canisters + .into_iter() .map(|name: String| -> DfxResult<_> { - Ok(canister_pool.get_first_canister_with_name(name.as_str()) - .ok_or_else(|| anyhow!("A canister with the name '{}' was not found in the current project.", name.clone()))? - ) + Ok(canister_pool + .get_first_canister_with_name(name.as_str()) + .ok_or_else(|| { + anyhow!( + "A canister with the name '{}' was not found in the current project.", + name.clone() + ) + })?) }) .try_collect()?; @@ -324,7 +330,11 @@ async fn build_canisters( let build_config = BuildConfig::from_config(config, env.get_network_descriptor().is_playground())? 
.with_canisters_to_build( - toplevel_canisters.iter().map(|canister| canister.get_name().to_string()).collect()) // hack + toplevel_canisters + .iter() + .map(|canister| canister.get_name().to_string()) + .collect(), + ) // hack .with_env_file(env_file); canister_pool.build_or_fail(env, log, &build_config).await?; Ok(()) From 882514bfb820e9c54926d554369c219c612e5842 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 08:16:25 +0300 Subject: [PATCH 230/354] eliminated Clippy warnings --- src/dfx/src/lib/builders/motoko.rs | 3 +-- src/dfx/src/lib/models/canister.rs | 19 ++++++++----------- .../operations/canister/deploy_canisters.rs | 15 +++++++++------ 3 files changed, 18 insertions(+), 19 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 9f7d29304e..6ad5d2a5c8 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -196,8 +196,7 @@ impl CanisterBuilder for MotokoBuilder { .get_dependencies() .iter() .map(|s| s.as_str()) - .find(|&name| name == c.get_name()) - .is_some() + .any(|name| name == c.get_name()) }) // TODO: 1. Slow. 2. Use Motoko dependencies where appropriate. 
.map(|c| (c.get_name().to_string(), c.canister_id().to_text())) .collect(); diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 1f73ad726b..224cf902b7 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -567,7 +567,7 @@ impl CanisterPool { #[context("Failed to build dependencies graph for canister pool.")] pub fn build_canister_dependencies_graph( &self, - toplevel_canisters: &Vec>, + toplevel_canisters: &[Arc], cache: &dyn Cache, ) -> DfxResult<(DiGraph, HashMap)> { for canister in &self.canisters { @@ -820,7 +820,7 @@ impl CanisterPool { pub fn build_order( &self, env: &dyn Environment, - toplevel_canisters: &Vec>, + toplevel_canisters: &[Arc], ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); let (graph, nodes) = @@ -829,10 +829,10 @@ impl CanisterPool { let toplevel_nodes: Vec = toplevel_canisters .iter() .map(|canister| -> DfxResult { - Ok(nodes + nodes .get(&canister.canister_id()) - .ok_or_else(|| anyhow!("No such canister {}.", canister.get_name()))? - .clone()) + .copied() + .ok_or_else(|| anyhow!("No such canister {}.", canister.get_name())) }) .try_collect()?; @@ -898,15 +898,12 @@ impl CanisterPool { self.canisters .iter() .filter(|c| canisters.contains(&c.get_name().to_string())) - .map(|canister| canister.clone()) + .cloned() .collect() } else { - self.canisters - .iter() - .map(|canister| canister.clone()) - .collect() + self.canisters.clone() }; - let order = self.build_order(env, &toplevel_canisters.clone())?; // TODO: Eliminate `clone`. + let order = self.build_order(env, &toplevel_canisters)?; // TODO: Eliminate `clone`. 
self.step_prebuild_all(log, build_config, toplevel_canisters.as_slice()) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 15cc5560e5..7ff9d93693 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -25,6 +25,7 @@ use ic_utils::interfaces::management_canister::builders::InstallMode; use icrc_ledger_types::icrc1::account::Subaccount; use itertools::Itertools; use slog::info; +// use core::slice::SlicePattern; use std::convert::TryFrom; use std::path::{Path, PathBuf}; use std::sync::Arc; @@ -107,19 +108,21 @@ pub async fn deploy_canisters( let toplevel_canisters = toplevel_canisters .into_iter() .map(|name: String| -> DfxResult<_> { - Ok(canister_pool + canister_pool .get_first_canister_with_name(name.as_str()) .ok_or_else(|| { anyhow!( "A canister with the name '{}' was not found in the current project.", name.clone() ) - })?) + }) }) - .try_collect()?; + // .map(|v| &v) + .try_collect::, Vec>, _>()?; + let toplevel_canisters: &[Arc] = &toplevel_canisters; // TODO: `build_order` is called two times during deployment of a new canister. - let order = canister_pool.build_order(env, &toplevel_canisters)?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. + let order = canister_pool.build_order(env, toplevel_canisters)?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. 
let order_names: Vec = order .iter() .map(|canister| { @@ -154,7 +157,7 @@ pub async fn deploy_canisters( { register_canisters( env, - &canisters_to_install, + canisters_to_install, &initial_canister_id_store, with_cycles, specified_id_from_cli, @@ -173,7 +176,7 @@ pub async fn deploy_canisters( build_canisters( env, // &order_names, - &toplevel_canisters.as_slice(), + toplevel_canisters, &config, env_file.clone(), &canister_pool, From 5ad7844edf0ffdb50260dbf1ef629a2160599df5 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 08:43:11 +0300 Subject: [PATCH 231/354] remove an outdated comment --- src/dfx/src/lib/operations/canister/deploy_canisters.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/dfx/src/lib/operations/canister/deploy_canisters.rs b/src/dfx/src/lib/operations/canister/deploy_canisters.rs index 7ff9d93693..58de5984c5 100644 --- a/src/dfx/src/lib/operations/canister/deploy_canisters.rs +++ b/src/dfx/src/lib/operations/canister/deploy_canisters.rs @@ -122,7 +122,7 @@ pub async fn deploy_canisters( let toplevel_canisters: &[Arc] = &toplevel_canisters; // TODO: `build_order` is called two times during deployment of a new canister. - let order = canister_pool.build_order(env, toplevel_canisters)?; // TODO: `Some` here is a hack. // TODO: Eliminate `clone`. 
+ let order = canister_pool.build_order(env, toplevel_canisters)?; let order_names: Vec = order .iter() .map(|canister| { From 85f439a051f161d149fd66a7e3862dddaade0114 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 10:53:49 +0300 Subject: [PATCH 232/354] don't update .did file if not changed --- src/dfx/src/lib/models/canister.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 224cf902b7..e197961739 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -354,7 +354,9 @@ impl Canister { continue; } dfx_core::fs::composite::ensure_parent_dir_exists(&target)?; - dfx_core::fs::write(&target, &service_did)?; + if dfx_core::fs::read_to_string(&target)? != service_did { + dfx_core::fs::write(&target, &service_did)?; + } dfx_core::fs::set_permissions_readwrite(&target)?; } From e1d9e9f0f0e664cb91fdf3801b3ea524dacd3983 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 10:57:25 +0300 Subject: [PATCH 233/354] ignore canisters with unchanged .did --- src/dfx/src/lib/builders/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 1a99665704..2c09ed0dae 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -296,8 +296,8 @@ pub trait CanisterBuilder { if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { - let main_file = canister.get_info().get_main_file(); - main_file.map(|main_file| main_file.to_owned()) + let main_file = canister.get_info().get_service_idl_path(); + Some(main_file) } else { None } From 9097b693d0125cf1d98fea66bef41bbe85c5efc3 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 11:21:44 +0300 Subject: [PATCH 234/354] docs --- docs/cli-reference/dfx-build.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/docs/cli-reference/dfx-build.mdx b/docs/cli-reference/dfx-build.mdx index 278792cfbb..90a660e0ec 100644 --- a/docs/cli-reference/dfx-build.mdx +++ b/docs/cli-reference/dfx-build.mdx @@ -10,7 +10,7 @@ Note that you can only run this command from within the project directory struct The `dfx build` command looks for the source code to compile using the information you have configured under the `canisters` section in the `dfx.json` configuration file. -For compilation speed reasons, `dfx build` (and `dfx deploy`) don't recompile canisters, all dependencies of which are elder than the existing WebAssembly (from the previous compilation). +For compilation speed reasons, `dfx build` (and `dfx deploy`) don't recompile canisters, all dependencies of which are elder than the existing Candid file (from the previous compilation). Moreover, the Candid (`.did`) file is updated only when strictly necessary (that is on public interface change). This makes dependent canisters not recompile when a dependency canister changes without interface change. ## Basic usage From 83e908e81a1e50adca28686d5f426745072381e7 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 11:22:15 +0300 Subject: [PATCH 235/354] rebuild only if Candid changed --- src/dfx/src/lib/builders/motoko.rs | 1 - src/dfx/src/lib/models/canister.rs | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 6ad5d2a5c8..58cf0d9608 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -174,7 +174,6 @@ impl CanisterBuilder for MotokoBuilder { } } - /// TODO: Ideally, should make inter-canister dependencies to rely on `.did` file changed or not. 
#[context("Failed to build Motoko canister '{}'.", canister_info.get_name())] fn build( &self, diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index e197961739..b6e83fc346 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -354,7 +354,7 @@ impl Canister { continue; } dfx_core::fs::composite::ensure_parent_dir_exists(&target)?; - if dfx_core::fs::read_to_string(&target)? != service_did { + if !target.exists() || dfx_core::fs::read_to_string(&target)? != service_did { // TODO: Make atomic operation. dfx_core::fs::write(&target, &service_did)?; } dfx_core::fs::set_permissions_readwrite(&target)?; From fbf5e4eae250a7abe9986324a0599f621e688ea1 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 11:22:29 +0300 Subject: [PATCH 236/354] added (failed) tests --- e2e/tests-dfx/make_like.bash | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 5b03bfa672..98172b5971 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -26,14 +26,14 @@ teardown() { touch src/dependent.mo assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' assert_contains 'Upgrading code for canister dependency' touch src/dependency.mo assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' assert_contains 'Upgrading code for canister dependency' @@ -54,7 +54,7 @@ teardown() { touch src/lib.mo assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' - 
assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' assert_contains 'Upgrading code for canister dependency' @@ -104,7 +104,7 @@ teardown() { touch src/dependency.mo assert_command dfx build -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' - assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' touch src/dependency.mo assert_command dfx build -vv dependency @@ -118,7 +118,7 @@ teardown() { touch src/lib.mo assert_command dfx build -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' - assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' touch src/lib.mo assert_command dfx build -vv dependency @@ -128,11 +128,11 @@ teardown() { touch src/lib.mo assert_command dfx build -vv assert_contains '"moc-wrapper" "src/dependent.mo"' - assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_not_contains '"moc-wrapper" "src/dependency.mo"' touch src/dependency.mo assert_command dfx build -vv - assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_contains '"moc-wrapper" "src/dependency.mo"' touch src/dependent.mo @@ -155,4 +155,6 @@ teardown() { assert_not_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Installing code for canister dependent' assert_contains 'Installing code for canister dependency' -} \ No newline at end of file +} + +# TODO: Test changes in `dependency.mo` that change the `.did` file. 
From 0d490ad6cbde89894e8c6d3acff332b6456e8877 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 22:45:01 +0300 Subject: [PATCH 237/354] bug fix --- src/dfx/src/lib/builders/mod.rs | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 2c09ed0dae..ad9fa21b5f 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -287,16 +287,31 @@ pub trait CanisterBuilder { } else { panic!("programming error"); }; + println!("output_wasm_path: {}", output_wasm_path.to_str().unwrap()); // FIXME: Remove. let mut import_iter = Bfs::new(&imports.graph, start); + let mut top_level = true; // link to our main Canister with `.wasm` loop { if let Some(import) = import_iter.next(&imports.graph) { + let top_level_cur = top_level; + top_level = false; + println!("IMPORT: {:?}", import); // FIXME: Remove. let subnode = &imports.graph[import]; + if top_level_cur { + assert!(match subnode { + Import::Canister(_) => true, + _ => false, + }, "the top-level import must be a canister"); + } let imported_file = match subnode { Import::Canister(canister_name) => { if let Some(canister) = pool.get_first_canister_with_name(canister_name.as_str()) { - let main_file = canister.get_info().get_service_idl_path(); + let main_file = if top_level_cur { + canister.get_info().get_output_wasm_path().to_path_buf() + } else { + canister.get_info().get_service_idl_path() + }; Some(main_file) } else { None @@ -327,7 +342,9 @@ pub trait CanisterBuilder { Some(full_path.clone()) // TODO: Eliminate `clone`. } }; + println!("FILE: {:?}", imported_file); // FIXME: Remove. if let Some(imported_file) = imported_file { + println!("FILE2: {:?}", imported_file.to_str().unwrap()); // FIXME: Remove. 
let imported_file_metadata = metadata(&imported_file)?; let imported_file_time = imported_file_metadata.modified()?; if imported_file_time > wasm_file_time { From 26389866df070b24749628f5ce5962c1c12d3f8a Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 23:10:38 +0300 Subject: [PATCH 238/354] tests passed --- e2e/tests-dfx/make_like.bash | 18 +++++++++--------- src/dfx/src/lib/builders/mod.rs | 4 ++-- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 98172b5971..6b5f476e78 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -26,7 +26,7 @@ teardown() { touch src/dependent.mo assert_command dfx deploy -vv dependent - assert_not_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' assert_contains 'Upgrading code for canister dependency' @@ -46,7 +46,7 @@ teardown() { assert_contains 'Upgrading code for canister dependency' assert_command dfx deploy -vv dependent - assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' assert_contains 'Upgrading code for canister dependency' @@ -54,7 +54,7 @@ teardown() { touch src/lib.mo assert_command dfx deploy -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' - assert_not_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' assert_contains 'Upgrading code for canister dependency' @@ -74,7 +74,7 @@ teardown() { touch src/dependency.mo assert_command dfx deploy -vv - assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' 
assert_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' assert_contains 'Upgrading code for canister dependency' @@ -103,8 +103,8 @@ teardown() { touch src/dependency.mo assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "src/dependent.mo"' - assert_not_contains '"moc-wrapper" "src/dependency.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' touch src/dependency.mo assert_command dfx build -vv dependency @@ -112,13 +112,13 @@ teardown() { assert_contains '"moc-wrapper" "src/dependency.mo"' assert_command dfx build -vv dependent - assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_not_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' touch src/lib.mo assert_command dfx build -vv dependent assert_contains '"moc-wrapper" "src/dependent.mo"' - assert_not_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' touch src/lib.mo assert_command dfx build -vv dependency @@ -128,7 +128,7 @@ teardown() { touch src/lib.mo assert_command dfx build -vv assert_contains '"moc-wrapper" "src/dependent.mo"' - assert_not_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' touch src/dependency.mo assert_command dfx build -vv diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index ad9fa21b5f..bb33d41a1c 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -4,7 +4,7 @@ use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::models::canister::CanisterPool; use crate::lib::models::canister::Import; -use anyhow::{bail, Context}; +use anyhow::{anyhow, bail, Context}; use candid::Principal as CanisterId; use candid_parser::utils::CandidSource; use dfx_core::config::cache::Cache; @@ -308,7 
+308,7 @@ pub trait CanisterBuilder { pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = if top_level_cur { - canister.get_info().get_output_wasm_path().to_path_buf() + canister.get_info().get_main_file().ok_or_else(|| anyhow!("No main file."))?.to_path_buf() } else { canister.get_info().get_service_idl_path() }; From 61f7bc3d3e152041281a298fe7b9acb98b672f4b Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 23:17:14 +0300 Subject: [PATCH 239/354] removed tracing --- src/dfx/src/lib/builders/mod.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index bb33d41a1c..67e224a946 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -287,14 +287,12 @@ pub trait CanisterBuilder { } else { panic!("programming error"); }; - println!("output_wasm_path: {}", output_wasm_path.to_str().unwrap()); // FIXME: Remove. let mut import_iter = Bfs::new(&imports.graph, start); let mut top_level = true; // link to our main Canister with `.wasm` loop { if let Some(import) = import_iter.next(&imports.graph) { let top_level_cur = top_level; top_level = false; - println!("IMPORT: {:?}", import); // FIXME: Remove. let subnode = &imports.graph[import]; if top_level_cur { assert!(match subnode { @@ -342,9 +340,7 @@ pub trait CanisterBuilder { Some(full_path.clone()) // TODO: Eliminate `clone`. } }; - println!("FILE: {:?}", imported_file); // FIXME: Remove. if let Some(imported_file) = imported_file { - println!("FILE2: {:?}", imported_file.to_str().unwrap()); // FIXME: Remove. 
let imported_file_metadata = metadata(&imported_file)?; let imported_file_time = imported_file_metadata.modified()?; if imported_file_time > wasm_file_time { From 410c6df5ec31e0897d94cb7b8a2ee1f10086f8f0 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Fri, 26 Apr 2024 23:36:29 +0300 Subject: [PATCH 240/354] more tests --- e2e/assets/make_like/src/dependency_altered.mo | 10 ++++++++++ e2e/tests-dfx/make_like.bash | 12 ++++++++++++ 2 files changed, 22 insertions(+) create mode 100644 e2e/assets/make_like/src/dependency_altered.mo diff --git a/e2e/assets/make_like/src/dependency_altered.mo b/e2e/assets/make_like/src/dependency_altered.mo new file mode 100644 index 0000000000..11d3ed6f57 --- /dev/null +++ b/e2e/assets/make_like/src/dependency_altered.mo @@ -0,0 +1,10 @@ +import L "lib"; + +actor { + public shared func greet(name: Text) : async Text { + return "Hello, " # name # "!"; + }; + + public shared func anotherFunction() : async () { + }; +} \ No newline at end of file diff --git a/e2e/tests-dfx/make_like.bash b/e2e/tests-dfx/make_like.bash index 6b5f476e78..b95726ce03 100644 --- a/e2e/tests-dfx/make_like.bash +++ b/e2e/tests-dfx/make_like.bash @@ -85,6 +85,13 @@ teardown() { assert_not_contains '"moc-wrapper" "src/dependency.mo"' assert_contains 'Upgrading code for canister dependent' assert_contains 'Upgrading code for canister dependency' + + cp src/dependency_altered.mo src/dependency.mo + assert_command dfx deploy -vv + assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' + assert_contains 'Upgrading code for canister dependent' + assert_contains 'Upgrading code for canister dependency' } @test "trying to break dependency compiling: build" { @@ -139,6 +146,11 @@ teardown() { assert_command dfx build -vv assert_contains '"moc-wrapper" "src/dependent.mo"' assert_not_contains '"moc-wrapper" "src/dependency.mo"' + + cp src/dependency_altered.mo src/dependency.mo + assert_command dfx build -vv + 
assert_contains '"moc-wrapper" "src/dependent.mo"' + assert_contains '"moc-wrapper" "src/dependency.mo"' } @test "mix build and deploy" { From 79f1faff16e735b9b00b2a950281edffc4afc53e Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 27 Apr 2024 00:30:16 +0300 Subject: [PATCH 241/354] bug fix --- src/dfx/src/lib/builders/mod.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/dfx/src/lib/builders/mod.rs b/src/dfx/src/lib/builders/mod.rs index 67e224a946..3caf4c67ce 100644 --- a/src/dfx/src/lib/builders/mod.rs +++ b/src/dfx/src/lib/builders/mod.rs @@ -4,7 +4,7 @@ use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; use crate::lib::models::canister::CanisterPool; use crate::lib::models::canister::Import; -use anyhow::{anyhow, bail, Context}; +use anyhow::{bail, Context}; use candid::Principal as CanisterId; use candid_parser::utils::CandidSource; use dfx_core::config::cache::Cache; @@ -306,7 +306,11 @@ pub trait CanisterBuilder { pool.get_first_canister_with_name(canister_name.as_str()) { let main_file = if top_level_cur { - canister.get_info().get_main_file().ok_or_else(|| anyhow!("No main file."))?.to_path_buf() + if let Some(main_file) = canister.get_info().get_main_file() { + main_file.to_path_buf() + } else { + continue; + } } else { canister.get_info().get_service_idl_path() }; From b2b48db2ed31681dccc55494e337521ad47b5409 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 27 Apr 2024 01:53:12 +0300 Subject: [PATCH 242/354] half-done bug fix --- src/dfx/src/lib/models/canister.rs | 57 ++++++++++++++++-------------- 1 file changed, 30 insertions(+), 27 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 224cf902b7..4fba201377 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -567,7 +567,7 @@ impl CanisterPool { #[context("Failed to build dependencies graph for canister pool.")] pub fn 
build_canister_dependencies_graph( &self, - toplevel_canisters: &[Arc], + toplevel_canisters: &[&Canister], cache: &dyn Cache, ) -> DfxResult<(DiGraph, HashMap)> { for canister in &self.canisters { @@ -671,7 +671,7 @@ impl CanisterPool { Ok((dest_graph, dest_nodes)) } - fn canister_dependencies(&self, toplevel_canisters: &[Arc]) -> Vec> { + fn canister_dependencies(&self, toplevel_canisters: &[&Canister]) -> Vec> { let iter = toplevel_canisters .iter() .flat_map(|canister| { @@ -680,7 +680,7 @@ impl CanisterPool { .imports .borrow() .nodes - .get(&Import::Canister(canister.as_ref().get_name().to_owned())) + .get(&Import::Canister(canister.get_name().to_owned())) .unwrap(); let imports = self.imports.borrow(); let neighbors = imports.graph.neighbors(parent_node); @@ -708,13 +708,30 @@ impl CanisterPool { #[context("Failed step_prebuild_all.")] fn step_prebuild_all( &self, - log: &Logger, build_config: &BuildConfig, - toplevel_canisters: &[Arc], ) -> DfxResult<()> { + // cargo audit + if self + .canisters_to_build(build_config) + .iter() + .any(|can| can.info.is_rust()) + { + self.run_cargo_audit()?; + } else { + trace!( + self.logger, + "No canister of type 'rust' found. Not trying to run 'cargo audit'." + ) + } + + Ok(()) + } + + fn step_prebuild(&self, build_config: &BuildConfig, canister: &Canister) -> DfxResult<()> { // moc expects all .did files of dependencies to be in with name .did. // Copy .did files into this temporary directory. - for canister in self.canister_dependencies(toplevel_canisters) { + let log = self.get_logger(); + for canister in self.canister_dependencies(&[canister]) { let maybe_from = if let Some(remote_candid) = canister.info.get_remote_candid() { Some(remote_candid) } else { @@ -753,24 +770,6 @@ impl CanisterPool { } } - // cargo audit - if self - .canisters_to_build(build_config) - .iter() - .any(|can| can.info.is_rust()) - { - self.run_cargo_audit()?; - } else { - trace!( - self.logger, - "No canister of type 'rust' found. 
Not trying to run 'cargo audit'." - ) - } - - Ok(()) - } - - fn step_prebuild(&self, build_config: &BuildConfig, canister: &Canister) -> DfxResult<()> { canister.prebuild(self, build_config) } @@ -805,7 +804,10 @@ impl CanisterPool { ) -> DfxResult<()> { // We don't want to simply remove the whole directory, as in the future, // we may want to keep the IDL files downloaded from network. - for canister in self.canister_dependencies(toplevel_canisters) { + // TODO: The following `map` is a hack. + for canister in &self.canister_dependencies( + &toplevel_canisters.iter().map(|canister| canister.as_ref()).collect::>() + ) { let idl_root = &build_config.idl_root; let canister_id = canister.canister_id(); let idl_file_path = idl_root.join(canister_id.to_text()).with_extension("did"); @@ -823,8 +825,9 @@ impl CanisterPool { toplevel_canisters: &[Arc], ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); + // TODO: The following `map` is a hack. let (graph, nodes) = - self.build_canister_dependencies_graph(toplevel_canisters, env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? + self.build_canister_dependencies_graph(&toplevel_canisters.iter().map(|canister| canister.as_ref()).collect::>(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? let toplevel_nodes: Vec = toplevel_canisters .iter() @@ -905,7 +908,7 @@ impl CanisterPool { }; let order = self.build_order(env, &toplevel_canisters)?; // TODO: Eliminate `clone`. 
- self.step_prebuild_all(log, build_config, toplevel_canisters.as_slice()) + self.step_prebuild_all(build_config) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; let mut result = Vec::new(); From 4bd860b45dfbba03fc494ebd5af9b98748accc96 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 27 Apr 2024 01:58:06 +0300 Subject: [PATCH 243/354] bug fix (untested) --- src/dfx/src/lib/models/canister.rs | 35 +++++++++++++++--------------- 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 4fba201377..3e9328f77e 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -728,7 +728,9 @@ impl CanisterPool { } fn step_prebuild(&self, build_config: &BuildConfig, canister: &Canister) -> DfxResult<()> { + canister.prebuild(self, build_config)?; // moc expects all .did files of dependencies to be in with name .did. + // Copy .did files into this temporary directory. let log = self.get_logger(); for canister in self.canister_dependencies(&[canister]) { @@ -770,7 +772,7 @@ impl CanisterPool { } } - canister.prebuild(self, build_config) + Ok(()) } fn step_build<'a>( @@ -787,6 +789,18 @@ impl CanisterPool { canister: &Canister, build_output: &BuildOutput, ) -> DfxResult<()> { + // We don't want to simply remove the whole directory, as in the future, + // we may want to keep the IDL files downloaded from network. + // TODO: The following `map` is a hack. + for canister in &self.canister_dependencies(&[&canister]) { + let idl_root = &build_config.idl_root; + let canister_id = canister.canister_id(); + let idl_file_path = idl_root.join(canister_id.to_text()).with_extension("did"); + + // Ignore errors (e.g. File Not Found). 
+ let _ = std::fs::remove_file(idl_file_path); + } + canister.candid_post_process(self.get_logger(), build_config, build_output)?; canister.wasm_post_process(self.get_logger(), build_output)?; @@ -798,24 +812,9 @@ impl CanisterPool { fn step_postbuild_all( &self, - build_config: &BuildConfig, + _build_config: &BuildConfig, _order: &[CanisterId], - toplevel_canisters: &[Arc], ) -> DfxResult<()> { - // We don't want to simply remove the whole directory, as in the future, - // we may want to keep the IDL files downloaded from network. - // TODO: The following `map` is a hack. - for canister in &self.canister_dependencies( - &toplevel_canisters.iter().map(|canister| canister.as_ref()).collect::>() - ) { - let idl_root = &build_config.idl_root; - let canister_id = canister.canister_id(); - let idl_file_path = idl_root.join(canister_id.to_text()).with_extension("did"); - - // Ignore errors (e.g. File Not Found). - let _ = std::fs::remove_file(idl_file_path); - } - Ok(()) } @@ -960,7 +959,7 @@ impl CanisterPool { } } - self.step_postbuild_all(build_config, &order, toplevel_canisters.as_slice()) + self.step_postbuild_all(build_config, &order) .map_err(|e| DfxError::new(BuildError::PostBuildAllStepFailed(Box::new(e))))?; Ok(result) From c42605caac38f77bd4c6ea7ceb2622b97414f291 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 27 Apr 2024 18:20:06 +0300 Subject: [PATCH 244/354] added (failing) test add_dependency.bash --- e2e/assets/add_dependency/a.mo | 5 ++++ e2e/assets/add_dependency/b.mo | 3 +++ e2e/assets/add_dependency/dfx.json | 14 ++++++++++ e2e/assets/add_dependency/dfx_corrected.json | 11 ++++++++ e2e/tests-dfx/add_dependency.bash | 28 ++++++++++++++++++++ 5 files changed, 61 insertions(+) create mode 100644 e2e/assets/add_dependency/a.mo create mode 100644 e2e/assets/add_dependency/b.mo create mode 100644 e2e/assets/add_dependency/dfx.json create mode 100644 e2e/assets/add_dependency/dfx_corrected.json create mode 100644 e2e/tests-dfx/add_dependency.bash 
diff --git a/e2e/assets/add_dependency/a.mo b/e2e/assets/add_dependency/a.mo new file mode 100644 index 0000000000..8599f4e34a --- /dev/null +++ b/e2e/assets/add_dependency/a.mo @@ -0,0 +1,5 @@ +import _ "canister:b" + +actor { + public shared func f() {}; +} \ No newline at end of file diff --git a/e2e/assets/add_dependency/b.mo b/e2e/assets/add_dependency/b.mo new file mode 100644 index 0000000000..2f669ccd60 --- /dev/null +++ b/e2e/assets/add_dependency/b.mo @@ -0,0 +1,3 @@ +actor { + public shared func g() {}; +} \ No newline at end of file diff --git a/e2e/assets/add_dependency/dfx.json b/e2e/assets/add_dependency/dfx.json new file mode 100644 index 0000000000..2581ea52a3 --- /dev/null +++ b/e2e/assets/add_dependency/dfx.json @@ -0,0 +1,14 @@ +{ + "canisters": { + "a": { + "main": "a.mo", + "dependencies": ["c"] + }, + "b": { + "main": "b.mo" + }, + "c": { + "main": "b.mo" + } + } +} \ No newline at end of file diff --git a/e2e/assets/add_dependency/dfx_corrected.json b/e2e/assets/add_dependency/dfx_corrected.json new file mode 100644 index 0000000000..f6171e9eba --- /dev/null +++ b/e2e/assets/add_dependency/dfx_corrected.json @@ -0,0 +1,11 @@ +{ + "canisters": { + "a": { + "main": "a.mo", + "dependencies": ["b"] + }, + "b": { + "main": "b.mo" + } + } +} \ No newline at end of file diff --git a/e2e/tests-dfx/add_dependency.bash b/e2e/tests-dfx/add_dependency.bash new file mode 100644 index 0000000000..d1b1d4dbc0 --- /dev/null +++ b/e2e/tests-dfx/add_dependency.bash @@ -0,0 +1,28 @@ +#!/usr/bin/env bats + +load ../utils/_ + +setup() { + standard_setup + + install_asset add_dependency +} + +teardown() { + dfx_stop + + standard_teardown +} + +@test "compiles after correcting dependency" { + install_asset base + + dfx_start + + # fails + dfx deploy || true + + cp dfx_corrected.json dfx.json + + assert_command dfx deploy +} From fc75b7dbc5c43d9899ac3609f54b7b98b8111901 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 27 Apr 2024 18:26:38 +0300 Subject: 
[PATCH 245/354] English grammar --- e2e/tests-dfx/add_dependency.bash | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/e2e/tests-dfx/add_dependency.bash b/e2e/tests-dfx/add_dependency.bash index d1b1d4dbc0..ae59af0964 100644 --- a/e2e/tests-dfx/add_dependency.bash +++ b/e2e/tests-dfx/add_dependency.bash @@ -14,7 +14,7 @@ teardown() { standard_teardown } -@test "compiles after correcting dependency" { +@test "compiles after correcting a dependency" { install_asset base dfx_start From f178964880d56f519394f97d6feda57b81264ca9 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 27 Apr 2024 18:31:43 +0300 Subject: [PATCH 246/354] better test --- e2e/tests-dfx/add_dependency.bash | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/e2e/tests-dfx/add_dependency.bash b/e2e/tests-dfx/add_dependency.bash index ae59af0964..1e17e49230 100644 --- a/e2e/tests-dfx/add_dependency.bash +++ b/e2e/tests-dfx/add_dependency.bash @@ -14,13 +14,13 @@ teardown() { standard_teardown } +# Check that attempt to compile before correcting dependencies does not break further compilation. 
@test "compiles after correcting a dependency" { install_asset base dfx_start - # fails - dfx deploy || true + assert_command_fail dfx deploy cp dfx_corrected.json dfx.json From df9dd8dae1c3ef0189f35c1028f68b0e5762b1a7 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sat, 27 Apr 2024 23:42:52 +0300 Subject: [PATCH 247/354] removed an undersophisticated error --- src/dfx/src/lib/models/canister.rs | 49 +++++++++++++----------------- 1 file changed, 21 insertions(+), 28 deletions(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 3e9328f77e..2757eb49a2 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -893,7 +893,7 @@ impl CanisterPool { env: &dyn Environment, log: &Logger, build_config: &BuildConfig, - ) -> DfxResult>> { + ) -> DfxResult { // TODO: The next statement is slow and confusing code. let toplevel_canisters: Vec> = if let Some(canisters) = build_config.user_specified_canisters.clone() { @@ -910,7 +910,6 @@ impl CanisterPool { self.step_prebuild_all(build_config) .map_err(|e| DfxError::new(BuildError::PreBuildAllStepFailed(Box::new(e))))?; - let mut result = Vec::new(); for canister_id in &order { if let Some(canister) = self.get_canister(canister_id) { trace!(log, "Building canister '{}'.", canister.get_name()); @@ -925,36 +924,34 @@ impl CanisterPool { env.get_cache().as_ref(), env.get_logger(), )? 
{ - result.push( - self.step_prebuild(build_config, canister) - .map_err(|e| { - BuildError::PreBuildStepFailed( + self.step_prebuild(build_config, canister) + .map_err(|e| { + BuildError::PreBuildStepFailed( + *canister_id, + canister.get_name().to_string(), + Box::new(e), + ) + }) + .and_then(|_| { + self.step_build(build_config, canister).map_err(|e| { + BuildError::BuildStepFailed( *canister_id, canister.get_name().to_string(), Box::new(e), ) }) - .and_then(|_| { - self.step_build(build_config, canister).map_err(|e| { - BuildError::BuildStepFailed( + }) + .and_then(|o: &BuildOutput| { + self.step_postbuild(build_config, canister, o) + .map_err(|e| { + BuildError::PostBuildStepFailed( *canister_id, canister.get_name().to_string(), Box::new(e), ) }) - }) - .and_then(|o| { - self.step_postbuild(build_config, canister, o) - .map_err(|e| { - BuildError::PostBuildStepFailed( - *canister_id, - canister.get_name().to_string(), - Box::new(e), - ) - }) - .map(|_| o) - }), - ); + .map(|_| o) + })?; } } } @@ -962,7 +959,7 @@ impl CanisterPool { self.step_postbuild_all(build_config, &order) .map_err(|e| DfxError::new(BuildError::PostBuildAllStepFailed(Box::new(e))))?; - Ok(result) + Ok(()) } /// Build all canisters, failing with the first that failed the build. 
Will return @@ -977,11 +974,7 @@ impl CanisterPool { build_config: &BuildConfig, ) -> DfxResult<()> { self.download(build_config).await?; - let outputs = self.build(env, log, build_config)?; - - for output in outputs { - output.map_err(DfxError::new)?; - } + self.build(env, log, build_config)?; Ok(()) } From 84609b5a3f7ccf115afca4014aa3a1162845b777 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 28 Apr 2024 01:46:48 +0300 Subject: [PATCH 248/354] removed tracing --- src/dfx/src/lib/builders/motoko.rs | 2 ++ src/dfx/src/lib/graph/traverse_filtered.rs | 1 + src/dfx/src/lib/models/canister.rs | 14 +++++++------- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/dfx/src/lib/builders/motoko.rs b/src/dfx/src/lib/builders/motoko.rs index 6ad5d2a5c8..94bda978cd 100644 --- a/src/dfx/src/lib/builders/motoko.rs +++ b/src/dfx/src/lib/builders/motoko.rs @@ -102,6 +102,8 @@ pub fn add_imports( Some(canister.get_info()), )?; } + } else { + panic!("Canister '{}' not in pool", &canister_name); } } _ => {} diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 155dc71893..b3ee35b989 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -33,6 +33,7 @@ impl BfsFiltered { { while let Some(source_child_id) = &self.base.next(graph) { if predicate(source_child_id)? { + // FIXME: Item can have multiple parents. 
let mut source_parent_iter = graph.neighbors_directed(*source_child_id, Incoming); let mut source_parent_id; if let Some(id1) = source_parent_iter.next() { diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 2757eb49a2..3193aa0888 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -577,14 +577,14 @@ impl CanisterPool { .map(|canister| canister.get_info().get_name()) .contains(&canister.get_info().get_name()); if contains { - let canister_info = &canister.info; - // TODO: Ignored return value is a hack. - let _deps: Vec = - canister.builder.get_dependencies(self, canister_info)?; + canister + .builder + .read_dependencies(self, canister.get_info(), cache)?; // TODO: It is called multiple times during the flow. + // let canister_info = &canister.info; + // let _deps: Vec = + // canister.builder.get_dependencies(self, canister_info)?; } - canister - .builder - .read_dependencies(self, canister.get_info(), cache)?; // TODO: It is called multiple times during the flow. + // println!("read_dependecies for CASNITER {}", canister.get_name()); // FIXME: Remove. (Correct output.) 
} let source_graph = &self.imports.borrow().graph; From 67257e96df5e0ec73e97eaba7d584ca6ef7afcbb Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 28 Apr 2024 02:13:03 +0300 Subject: [PATCH 249/354] bug fix (introduced another bug) --- src/dfx/src/lib/graph/traverse_filtered.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index b3ee35b989..ac17154faf 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -2,7 +2,6 @@ use petgraph::{ data::DataMap, visit::{Bfs, IntoNeighborsDirected, VisitMap}, - Direction::Incoming, }; use crate::lib::error::DfxResult; @@ -33,8 +32,8 @@ impl BfsFiltered { { while let Some(source_child_id) = &self.base.next(graph) { if predicate(source_child_id)? { - // FIXME: Item can have multiple parents. - let mut source_parent_iter = graph.neighbors_directed(*source_child_id, Incoming); + // TODO: Simplify it using `skip()`. + let mut source_parent_iter = self.base.stack.iter().rev(); let mut source_parent_id; if let Some(id1) = source_parent_iter.next() { source_parent_id = id1; From 76a95d10f96f73de49101cc636ba5e1ab7af9621 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 28 Apr 2024 02:45:14 +0300 Subject: [PATCH 250/354] bug fix (introduced another bug) --- src/dfx/src/lib/graph/traverse_filtered.rs | 34 ++++++++++------------ 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index ac17154faf..a6e10e2a76 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -30,24 +30,22 @@ impl BfsFiltered { NodeId: Copy + Eq, VM: VisitMap, { - while let Some(source_child_id) = &self.base.next(graph) { - if predicate(source_child_id)? { - // TODO: Simplify it using `skip()`. 
- let mut source_parent_iter = self.base.stack.iter().rev(); - let mut source_parent_id; - if let Some(id1) = source_parent_iter.next() { - source_parent_id = id1; - loop { - if predicate(&source_parent_id)? { - call(&source_parent_id, source_child_id)?; - break; - } - if let Some(id2) = source_parent_iter.next() { - source_parent_id = id2; - } else { - break; - } - } + while let Some(child_id) = &self.base.next(graph) { + if predicate(&child_id)? { + let mut parent_iter = self.base.stack.iter().rev(); + let parent_id = + parent_iter + .find_map(|&id| -> Option> { + match predicate(&id) { + Ok(true) => Some(Ok(id)), + Ok(false) => None, + Err(err) => Some(Err(err)), + } + }) + .transpose()?; + if let Some(parent_id) = parent_id { + assert!(parent_id != *child_id); + call(&parent_id, child_id)?; } } } From 836a73768757dd477f4b36091ce87eae74f15aec Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 28 Apr 2024 04:07:46 +0300 Subject: [PATCH 251/354] removed tracing --- src/dfx/src/lib/models/canister.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 3193aa0888..64388633e4 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -584,7 +584,6 @@ impl CanisterPool { // let _deps: Vec = // canister.builder.get_dependencies(self, canister_info)?; } - // println!("read_dependecies for CASNITER {}", canister.get_name()); // FIXME: Remove. (Correct output.) } let source_graph = &self.imports.borrow().graph; @@ -827,6 +826,11 @@ impl CanisterPool { // TODO: The following `map` is a hack. let (graph, nodes) = self.build_canister_dependencies_graph(&toplevel_canisters.iter().map(|canister| canister.as_ref()).collect::>(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? 
+ // FIXME: Remove: + println!("CANS: {:?}", graph.raw_edges().iter().map(|edge| ( + self.get_canister(graph.node_weight(edge.source()).unwrap()).unwrap().get_name(), + self.get_canister(graph.node_weight(edge.target()).unwrap()).unwrap().get_name(), + )).collect::>()); let toplevel_nodes: Vec = toplevel_canisters .iter() From af79910e82859a90a99505da884306e553cf5878 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 28 Apr 2024 04:49:38 +0300 Subject: [PATCH 252/354] alleged bug fix --- src/dfx/src/lib/graph/traverse_filtered.rs | 79 ++++++++++++++-------- src/dfx/src/lib/models/canister.rs | 6 +- 2 files changed, 54 insertions(+), 31 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index a6e10e2a76..897dac7130 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,54 +1,77 @@ // TODO: Somebody, adopt this code (and DFS) to `petgraph`. use petgraph::{ data::DataMap, - visit::{Bfs, IntoNeighborsDirected, VisitMap}, + visit::IntoNeighborsDirected, }; use crate::lib::error::DfxResult; -pub struct BfsFiltered { - base: Bfs, - // node_filter: P, -} +pub struct BfsFiltered {} -impl BfsFiltered { - pub fn new(base: Bfs) -> Self { - Self { base } +// FIXME: This is DFS, not BFS. +impl BfsFiltered +{ + pub fn new() -> Self { + Self { } } /// TODO: Refactor: Extract `iter` function from here. 
- pub fn traverse2( + pub fn traverse2( &mut self, graph: G, mut predicate: P, mut call: C, + node_id: NodeId, ) -> DfxResult<()> where C: FnMut(&NodeId, &NodeId) -> DfxResult<()>, G: IntoNeighborsDirected + DataMap, + NodeId: Copy + Eq, + P: FnMut(&NodeId) -> DfxResult, + { + self.traverse2_recursive( + graph, + &mut predicate, + &mut call, + node_id, + &mut Vec::new()) + } + + fn traverse2_recursive( + &mut self, + graph: G, + predicate: &mut P, + call: &mut C, + node_id: NodeId, + ancestors: &mut Vec, + ) -> DfxResult<()> + where + C: FnMut(&NodeId, &NodeId) -> DfxResult<()>, + G: IntoNeighborsDirected + DataMap, + NodeId: Copy + Eq, P: FnMut(&NodeId) -> DfxResult, NodeId: Copy + Eq, - VM: VisitMap, { - while let Some(child_id) = &self.base.next(graph) { - if predicate(&child_id)? { - let mut parent_iter = self.base.stack.iter().rev(); - let parent_id = - parent_iter - .find_map(|&id| -> Option> { - match predicate(&id) { - Ok(true) => Some(Ok(id)), - Ok(false) => None, - Err(err) => Some(Err(err)), - } - }) - .transpose()?; - if let Some(parent_id) = parent_id { - assert!(parent_id != *child_id); - call(&parent_id, child_id)?; - } - } + let ancestor_id = + ancestors.iter().rev() + .find_map(|&id| -> Option> { + match predicate(&id) { + Ok(true) => Some(Ok(id)), + Ok(false) => None, + Err(err) => Some(Err(err)), + } + }) + .transpose()?; + if let Some(ancestor_id) = ancestor_id { + assert!(ancestor_id != node_id); + call(&ancestor_id, &node_id)?; + } + ancestors.push(node_id); + for subnode_id in graph.neighbors(node_id) { + self.traverse2_recursive(graph, predicate, call, subnode_id, ancestors)?; } + ancestors.pop(); + Ok(()) } } diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 64388633e4..958fc864d5 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -623,8 +623,7 @@ impl CanisterPool { .or_insert_with(|| dest_graph.add_node(parent_canister_id)); 
dest_nodes.insert(parent_canister_id, parent_dest_id); - let bfs = Bfs::new(&source_graph, start_node); - let mut filtered_bfs = BfsFiltered::new(bfs); + let mut filtered_bfs = BfsFiltered::new(); filtered_bfs.traverse2( source_graph, |&s| { @@ -663,7 +662,8 @@ impl CanisterPool { dest_graph.update_edge(dest_parent_id, dest_child_id, ()); Ok(()) - } + }, + start_node, )?; } From 3b0042085d1825a1a8cc67cb427b73d28a99b2b2 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 28 Apr 2024 05:00:31 +0300 Subject: [PATCH 253/354] bug fixes --- src/dfx/src/lib/graph/traverse_filtered.rs | 7 +++++-- src/dfx/src/lib/models/canister.rs | 11 +++-------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 897dac7130..446d5ee7cf 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -6,10 +6,10 @@ use petgraph::{ use crate::lib::error::DfxResult; -pub struct BfsFiltered {} +pub struct DfsFiltered {} // FIXME: This is DFS, not BFS. -impl BfsFiltered +impl DfsFiltered { pub fn new() -> Self { Self { } @@ -52,6 +52,9 @@ impl BfsFiltered P: FnMut(&NodeId) -> DfxResult, NodeId: Copy + Eq, { + if !predicate(&node_id)? 
{ + return Ok(()); + } let ancestor_id = ancestors.iter().rev() .find_map(|&id| -> Option> { diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index 958fc864d5..dae4c41f1e 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -5,7 +5,7 @@ use crate::lib::builders::{ use crate::lib::canister_info::CanisterInfo; use crate::lib::environment::Environment; use crate::lib::error::{BuildError, DfxError, DfxResult}; -use crate::lib::graph::traverse_filtered::BfsFiltered; +use crate::lib::graph::traverse_filtered::DfsFiltered; use crate::lib::metadata::dfx::DfxMetadata; use crate::lib::metadata::names::{CANDID_ARGS, CANDID_SERVICE, DFX}; use crate::lib::wasm::file::{compress_bytes, read_wasm_module}; @@ -623,8 +623,8 @@ impl CanisterPool { .or_insert_with(|| dest_graph.add_node(parent_canister_id)); dest_nodes.insert(parent_canister_id, parent_dest_id); - let mut filtered_bfs = BfsFiltered::new(); - filtered_bfs.traverse2( + let mut filtered_dfs = DfsFiltered::new(); + filtered_dfs.traverse2( source_graph, |&s| { let source_id = source_graph.node_weight(s); @@ -826,11 +826,6 @@ impl CanisterPool { // TODO: The following `map` is a hack. let (graph, nodes) = self.build_canister_dependencies_graph(&toplevel_canisters.iter().map(|canister| canister.as_ref()).collect::>(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? 
- // FIXME: Remove: - println!("CANS: {:?}", graph.raw_edges().iter().map(|edge| ( - self.get_canister(graph.node_weight(edge.source()).unwrap()).unwrap().get_name(), - self.get_canister(graph.node_weight(edge.target()).unwrap()).unwrap().get_name(), - )).collect::>()); let toplevel_nodes: Vec = toplevel_canisters .iter() From f568ffb9c0c903efea5f79ff37e7fcb6be28533f Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 28 Apr 2024 18:03:29 +0300 Subject: [PATCH 254/354] cargo fmt --- src/dfx/src/lib/graph/traverse_filtered.rs | 38 +++++++++------------- src/dfx/src/lib/models/canister.rs | 20 +++++++----- 2 files changed, 26 insertions(+), 32 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 446d5ee7cf..84f077dd5e 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -1,18 +1,14 @@ // TODO: Somebody, adopt this code (and DFS) to `petgraph`. -use petgraph::{ - data::DataMap, - visit::IntoNeighborsDirected, -}; +use petgraph::{data::DataMap, visit::IntoNeighborsDirected}; use crate::lib::error::DfxResult; pub struct DfsFiltered {} // FIXME: This is DFS, not BFS. -impl DfsFiltered -{ +impl DfsFiltered { pub fn new() -> Self { - Self { } + Self {} } /// TODO: Refactor: Extract `iter` function from here. @@ -29,12 +25,7 @@ impl DfsFiltered NodeId: Copy + Eq, P: FnMut(&NodeId) -> DfxResult, { - self.traverse2_recursive( - graph, - &mut predicate, - &mut call, - node_id, - &mut Vec::new()) + self.traverse2_recursive(graph, &mut predicate, &mut call, node_id, &mut Vec::new()) } fn traverse2_recursive( @@ -55,16 +46,17 @@ impl DfsFiltered if !predicate(&node_id)? 
{ return Ok(()); } - let ancestor_id = - ancestors.iter().rev() - .find_map(|&id| -> Option> { - match predicate(&id) { - Ok(true) => Some(Ok(id)), - Ok(false) => None, - Err(err) => Some(Err(err)), - } - }) - .transpose()?; + let ancestor_id = ancestors + .iter() + .rev() + .find_map(|&id| -> Option> { + match predicate(&id) { + Ok(true) => Some(Ok(id)), + Ok(false) => None, + Err(err) => Some(Err(err)), + } + }) + .transpose()?; if let Some(ancestor_id) = ancestor_id { assert!(ancestor_id != node_id); call(&ancestor_id, &node_id)?; diff --git a/src/dfx/src/lib/models/canister.rs b/src/dfx/src/lib/models/canister.rs index dae4c41f1e..d2bbd04868 100644 --- a/src/dfx/src/lib/models/canister.rs +++ b/src/dfx/src/lib/models/canister.rs @@ -580,9 +580,9 @@ impl CanisterPool { canister .builder .read_dependencies(self, canister.get_info(), cache)?; // TODO: It is called multiple times during the flow. - // let canister_info = &canister.info; - // let _deps: Vec = - // canister.builder.get_dependencies(self, canister_info)?; + // let canister_info = &canister.info; + // let _deps: Vec = + // canister.builder.get_dependencies(self, canister_info)?; } } @@ -705,10 +705,7 @@ impl CanisterPool { } #[context("Failed step_prebuild_all.")] - fn step_prebuild_all( - &self, - build_config: &BuildConfig, - ) -> DfxResult<()> { + fn step_prebuild_all(&self, build_config: &BuildConfig) -> DfxResult<()> { // cargo audit if self .canisters_to_build(build_config) @@ -824,8 +821,13 @@ impl CanisterPool { ) -> DfxResult> { trace!(env.get_logger(), "Building dependencies graph."); // TODO: The following `map` is a hack. - let (graph, nodes) = - self.build_canister_dependencies_graph(&toplevel_canisters.iter().map(|canister| canister.as_ref()).collect::>(), env.get_cache().as_ref())?; // TODO: Can `clone` be eliminated? 
+ let (graph, nodes) = self.build_canister_dependencies_graph( + &toplevel_canisters + .iter() + .map(|canister| canister.as_ref()) + .collect::>(), + env.get_cache().as_ref(), + )?; // TODO: Can `clone` be eliminated? let toplevel_nodes: Vec = toplevel_canisters .iter() From 36b0c51fdfe28e10c86c1919e121170a33d9d227 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Sun, 28 Apr 2024 18:07:07 +0300 Subject: [PATCH 255/354] removed unused self parameter --- src/dfx/src/lib/graph/traverse_filtered.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/dfx/src/lib/graph/traverse_filtered.rs b/src/dfx/src/lib/graph/traverse_filtered.rs index 84f077dd5e..bfb58577b8 100644 --- a/src/dfx/src/lib/graph/traverse_filtered.rs +++ b/src/dfx/src/lib/graph/traverse_filtered.rs @@ -25,11 +25,10 @@ impl DfsFiltered { NodeId: Copy + Eq, P: FnMut(&NodeId) -> DfxResult, { - self.traverse2_recursive(graph, &mut predicate, &mut call, node_id, &mut Vec::new()) + Self::traverse2_recursive(graph, &mut predicate, &mut call, node_id, &mut Vec::new()) } fn traverse2_recursive( - &mut self, graph: G, predicate: &mut P, call: &mut C, @@ -63,7 +62,7 @@ impl DfsFiltered { } ancestors.push(node_id); for subnode_id in graph.neighbors(node_id) { - self.traverse2_recursive(graph, predicate, call, subnode_id, ancestors)?; + Self::traverse2_recursive(graph, predicate, call, subnode_id, ancestors)?; } ancestors.pop(); From 6b3a965bb027dd01ac1e65f785ae5f3d53da1709 Mon Sep 17 00:00:00 2001 From: Victor Porton Date: Mon, 29 Apr 2024 19:31:09 +0300 Subject: [PATCH 256/354] make #3733 bug repeatable --- e2e/assets/wrong_ids/dfx.json | 92 +++ e2e/assets/wrong_ids/mops.toml | 27 + e2e/assets/wrong_ids/src/backend/lib.mo | 724 ++++++++++++++++++ e2e/assets/wrong_ids/src/backend/main.mo | 255 ++++++ e2e/assets/wrong_ids/src/backend/order.mo | 522 +++++++++++++ e2e/assets/wrong_ids/src/backend/payments.mo | 344 +++++++++ .../wrong_ids/src/backend/personhood.mo | 87 +++ 
e2e/assets/wrong_ids/src/backend/pst.mo | 86 +++ e2e/assets/wrong_ids/src/custom.d.ts | 4 + .../declarations/CanDBIndex/CanDBIndex.did | 83 ++ .../CanDBIndex/CanDBIndex.did.d.ts | 63 ++ .../declarations/CanDBIndex/CanDBIndex.did.js | 87 +++ .../src/declarations/CanDBIndex/index.d.ts | 50 ++ .../src/declarations/CanDBIndex/index.js | 42 + .../CanDBPartition/CanDBPartition.did | 341 +++++++++ .../CanDBPartition/CanDBPartition.did.d.ts | 267 +++++++ .../CanDBPartition/CanDBPartition.did.js | 428 +++++++++++ .../declarations/CanDBPartition/index.d.ts | 50 ++ .../src/declarations/CanDBPartition/index.js | 42 + .../declarations/NacDBIndex/NacDBIndex.did | 246 ++++++ .../NacDBIndex/NacDBIndex.did.d.ts | 231 ++++++ .../declarations/NacDBIndex/NacDBIndex.did.js | 353 +++++++++ .../src/declarations/NacDBIndex/index.d.ts | 50 ++ .../src/declarations/NacDBIndex/index.js | 42 + .../NacDBPartition/NacDBPartition.did | 319 ++++++++ .../NacDBPartition/NacDBPartition.did.d.ts | 318 ++++++++ .../NacDBPartition/NacDBPartition.did.js | 488 ++++++++++++ .../declarations/NacDBPartition/index.d.ts | 50 ++ .../src/declarations/NacDBPartition/index.js | 42 + .../src/declarations/frontend/frontend.did | 262 +++++++ .../declarations/frontend/frontend.did.d.ts | 240 ++++++ .../src/declarations/frontend/frontend.did.js | 286 +++++++ .../src/declarations/frontend/index.d.ts | 50 ++ .../src/declarations/frontend/index.js | 42 + .../src/declarations/ic_eth/ic_eth.did | 5 + .../src/declarations/ic_eth/ic_eth.did.d.ts | 11 + .../src/declarations/ic_eth/ic_eth.did.js | 20 + .../src/declarations/ic_eth/index.d.ts | 50 ++ .../src/declarations/ic_eth/index.js | 42 + .../declarations/internet_identity/index.d.ts | 50 ++ .../declarations/internet_identity/index.js | 42 + .../internet_identity/internet_identity.did | 620 +++++++++++++++ .../internet_identity.did.d.ts | 392 ++++++++++ .../internet_identity.did.js | 506 ++++++++++++ .../src/declarations/main/index.d.ts | 50 ++ 
.../wrong_ids/src/declarations/main/index.js | 42 + .../wrong_ids/src/declarations/main/main.did | 48 ++ .../src/declarations/main/main.did.d.ts | 49 ++ .../src/declarations/main/main.did.js | 58 ++ .../src/declarations/order/index.d.ts | 50 ++ .../wrong_ids/src/declarations/order/index.js | 42 + .../src/declarations/order/order.did | 34 + .../src/declarations/order/order.did.d.ts | 29 + .../src/declarations/order/order.did.js | 35 + .../src/declarations/payments/index.d.ts | 50 ++ .../src/declarations/payments/index.js | 42 + .../src/declarations/payments/payments.did | 21 + .../declarations/payments/payments.did.d.ts | 26 + .../src/declarations/payments/payments.did.js | 23 + .../src/declarations/personhood/index.d.ts | 50 ++ .../src/declarations/personhood/index.js | 42 + .../declarations/personhood/personhood.did | 35 + .../personhood/personhood.did.d.ts | 31 + .../declarations/personhood/personhood.did.js | 47 ++ .../wrong_ids/src/declarations/pst/index.d.ts | 50 ++ .../wrong_ids/src/declarations/pst/index.js | 42 + .../wrong_ids/src/declarations/pst/pst.did | 163 ++++ .../src/declarations/pst/pst.did.d.ts | 136 ++++ .../wrong_ids/src/declarations/pst/pst.did.js | 148 ++++ .../src/frontend/assets/.ic-assets.json5 | 56 ++ .../wrong_ids/src/frontend/assets/dfinity.svg | 46 ++ .../wrong_ids/src/frontend/assets/favicon.ico | Bin 0 -> 141426 bytes .../wrong_ids/src/frontend/assets/logo2.svg | 37 + .../wrong_ids/src/frontend/assets/main.css | 20 + .../src/frontend/assets/nfid-logo.svg | 39 + .../src/frontend/src/.ic-assets.json5 | 56 ++ .../src/frontend/src/DataDispatcher.ts | 4 + .../src/frontend/src/component/AllItems.tsx | 81 ++ .../src/frontend/src/component/App.tsx | 207 +++++ .../src/frontend/src/component/EditFolder.tsx | 150 ++++ .../src/component/EditFoldersList.tsx | 126 +++ .../src/frontend/src/component/EditItem.tsx | 165 ++++ .../src/frontend/src/component/ShowItem.tsx | 332 ++++++++ .../src/frontend/src/component/SubFolders.tsx | 90 +++ 
.../src/component/auth/use-auth-client.tsx | 93 +++ .../frontend/src/component/misc/ItemType.tsx | 15 + .../frontend/src/component/misc/UpDown.tsx | 135 ++++ .../src/component/personhood/Person.tsx | 224 ++++++ .../wrong_ids/src/frontend/src/config.json | 7 + .../src/frontend/src/config.json.example | 7 + .../wrong_ids/src/frontend/src/data/Data.ts | 194 +++++ .../wrong_ids/src/frontend/src/index.html | 23 + .../wrong_ids/src/frontend/src/index.tsx | 7 + .../src/frontend/src/our-canisters.json | 1 + .../wrong_ids/src/frontend/src/util/client.ts | 34 + .../wrong_ids/src/frontend/src/util/folder.ts | 25 + .../src/frontend/src/util/iterators.ts | 14 + .../wrong_ids/src/frontend/src/util/types.ts | 4 + .../wrong_ids/src/frontend/tsconfig.json | 12 + e2e/assets/wrong_ids/src/ic_eth/Cargo.toml | 20 + .../wrong_ids/src/ic_eth/abi/erc1155.json | 314 ++++++++ .../wrong_ids/src/ic_eth/abi/erc721.json | 215 ++++++ e2e/assets/wrong_ids/src/ic_eth/ic_eth.did | 5 + .../wrong_ids/src/ic_eth/src/eth_rpc.rs | 160 ++++ e2e/assets/wrong_ids/src/ic_eth/src/lib.rs | 83 ++ e2e/assets/wrong_ids/src/ic_eth/src/util.rs | 9 + .../src/libs/configs/db.config.example.mo | 12 + .../wrong_ids/src/libs/configs/db.config.mo | 12 + .../libs/configs/passport.config.example.mo | 13 + .../src/libs/configs/passport.config.mo | 13 + .../src/libs/helpers/fractions.helper.mo | 15 + .../src/libs/helpers/modifiers.helper.mo | 0 .../wrong_ids/src/storage/CanDBIndex.mo | 294 +++++++ .../wrong_ids/src/storage/CanDBPartition.mo | 194 +++++ .../wrong_ids/src/storage/NacDBIndex.mo | 194 +++++ .../wrong_ids/src/storage/NacDBPartition.mo | 251 ++++++ e2e/tests-dfx/wrong_ids.bash | 23 + 117 files changed, 13725 insertions(+) create mode 100644 e2e/assets/wrong_ids/dfx.json create mode 100644 e2e/assets/wrong_ids/mops.toml create mode 100644 e2e/assets/wrong_ids/src/backend/lib.mo create mode 100644 e2e/assets/wrong_ids/src/backend/main.mo create mode 100644 e2e/assets/wrong_ids/src/backend/order.mo create mode 
100644 e2e/assets/wrong_ids/src/backend/payments.mo create mode 100644 e2e/assets/wrong_ids/src/backend/personhood.mo create mode 100644 e2e/assets/wrong_ids/src/backend/pst.mo create mode 100644 e2e/assets/wrong_ids/src/custom.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBIndex/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBIndex/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBPartition/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/CanDBPartition/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBIndex/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBIndex/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBPartition/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/NacDBPartition/index.js create mode 100644 
e2e/assets/wrong_ids/src/declarations/frontend/frontend.did create mode 100644 e2e/assets/wrong_ids/src/declarations/frontend/frontend.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/frontend/frontend.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/frontend/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/frontend/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did create mode 100644 e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/ic_eth/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/ic_eth/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/internet_identity/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/internet_identity/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did create mode 100644 e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/main/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/main/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/main/main.did create mode 100644 e2e/assets/wrong_ids/src/declarations/main/main.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/main/main.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/order/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/order/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/order/order.did create mode 100644 e2e/assets/wrong_ids/src/declarations/order/order.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/order/order.did.js create mode 100644 
e2e/assets/wrong_ids/src/declarations/payments/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/payments/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/payments/payments.did create mode 100644 e2e/assets/wrong_ids/src/declarations/payments/payments.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/payments/payments.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/personhood/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/personhood/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/personhood/personhood.did create mode 100644 e2e/assets/wrong_ids/src/declarations/personhood/personhood.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/personhood/personhood.did.js create mode 100644 e2e/assets/wrong_ids/src/declarations/pst/index.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/pst/index.js create mode 100644 e2e/assets/wrong_ids/src/declarations/pst/pst.did create mode 100644 e2e/assets/wrong_ids/src/declarations/pst/pst.did.d.ts create mode 100644 e2e/assets/wrong_ids/src/declarations/pst/pst.did.js create mode 100644 e2e/assets/wrong_ids/src/frontend/assets/.ic-assets.json5 create mode 100644 e2e/assets/wrong_ids/src/frontend/assets/dfinity.svg create mode 100644 e2e/assets/wrong_ids/src/frontend/assets/favicon.ico create mode 100644 e2e/assets/wrong_ids/src/frontend/assets/logo2.svg create mode 100644 e2e/assets/wrong_ids/src/frontend/assets/main.css create mode 100644 e2e/assets/wrong_ids/src/frontend/assets/nfid-logo.svg create mode 100644 e2e/assets/wrong_ids/src/frontend/src/.ic-assets.json5 create mode 100644 e2e/assets/wrong_ids/src/frontend/src/DataDispatcher.ts create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/AllItems.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/App.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/EditFolder.tsx create mode 100644 
e2e/assets/wrong_ids/src/frontend/src/component/EditFoldersList.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/EditItem.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/ShowItem.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/SubFolders.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/auth/use-auth-client.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/misc/ItemType.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/misc/UpDown.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/component/personhood/Person.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/config.json create mode 100644 e2e/assets/wrong_ids/src/frontend/src/config.json.example create mode 100644 e2e/assets/wrong_ids/src/frontend/src/data/Data.ts create mode 100644 e2e/assets/wrong_ids/src/frontend/src/index.html create mode 100644 e2e/assets/wrong_ids/src/frontend/src/index.tsx create mode 100644 e2e/assets/wrong_ids/src/frontend/src/our-canisters.json create mode 100644 e2e/assets/wrong_ids/src/frontend/src/util/client.ts create mode 100644 e2e/assets/wrong_ids/src/frontend/src/util/folder.ts create mode 100644 e2e/assets/wrong_ids/src/frontend/src/util/iterators.ts create mode 100644 e2e/assets/wrong_ids/src/frontend/src/util/types.ts create mode 100644 e2e/assets/wrong_ids/src/frontend/tsconfig.json create mode 100644 e2e/assets/wrong_ids/src/ic_eth/Cargo.toml create mode 100644 e2e/assets/wrong_ids/src/ic_eth/abi/erc1155.json create mode 100644 e2e/assets/wrong_ids/src/ic_eth/abi/erc721.json create mode 100644 e2e/assets/wrong_ids/src/ic_eth/ic_eth.did create mode 100644 e2e/assets/wrong_ids/src/ic_eth/src/eth_rpc.rs create mode 100644 e2e/assets/wrong_ids/src/ic_eth/src/lib.rs create mode 100644 e2e/assets/wrong_ids/src/ic_eth/src/util.rs create mode 100644 e2e/assets/wrong_ids/src/libs/configs/db.config.example.mo create mode 100644 
e2e/assets/wrong_ids/src/libs/configs/db.config.mo create mode 100644 e2e/assets/wrong_ids/src/libs/configs/passport.config.example.mo create mode 100644 e2e/assets/wrong_ids/src/libs/configs/passport.config.mo create mode 100644 e2e/assets/wrong_ids/src/libs/helpers/fractions.helper.mo create mode 100644 e2e/assets/wrong_ids/src/libs/helpers/modifiers.helper.mo create mode 100644 e2e/assets/wrong_ids/src/storage/CanDBIndex.mo create mode 100644 e2e/assets/wrong_ids/src/storage/CanDBPartition.mo create mode 100644 e2e/assets/wrong_ids/src/storage/NacDBIndex.mo create mode 100644 e2e/assets/wrong_ids/src/storage/NacDBPartition.mo create mode 100644 e2e/tests-dfx/wrong_ids.bash diff --git a/e2e/assets/wrong_ids/dfx.json b/e2e/assets/wrong_ids/dfx.json new file mode 100644 index 0000000000..1b78f8e37e --- /dev/null +++ b/e2e/assets/wrong_ids/dfx.json @@ -0,0 +1,92 @@ +{ + "canisters": { + "frontend": { + "frontend": { + "entrypoint": "src/frontend/src/index.html" + }, + "source": [ + "src/frontend/assets", + "dist/frontend/" + ], + "type": "assets", + "dependencies": ["ic_eth", "CanDBIndex", "order", "main"] + }, + "main": { + "main": "src/backend/main.mo", + "type": "motoko", + "dependencies": ["order", "CanDBIndex", "NacDBIndex"] + }, + "order": { + "main": "src/backend/order.mo", + "type": "motoko", + "dependencies": ["CanDBIndex", "NacDBIndex"] + }, + "personhood": { + "main": "src/backend/personhood.mo", + "type": "motoko", + "dependencies": ["ic_eth", "CanDBIndex", "order"] + }, + "payments": { + "main": "src/backend/payments.mo", + "type": "motoko", + "dependencies": ["pst"] + }, + "pst": { + "main": "src/backend/pst.mo", + "type": "motoko" + }, + "CanDBIndex": { + "main": "src/storage/CanDBIndex.mo", + "type": "motoko", + "dependencies": [] + }, + "NacDBIndex": { + "main": "src/storage/NacDBIndex.mo", + "type": "motoko", + "dependencies": [] + }, + "CanDBPartition": { + "main": "src/storage/CanDBPartition.mo", + "type": "motoko", + "deploy": false, + 
"dependencies": [] + }, + "NacDBPartition": { + "main": "src/storage/NacDBPartition.mo", + "type": "motoko", + "deploy": false, + "dependencies": [] + }, + "ic_eth": { + "type": "rust", + "candid": "src/ic_eth/ic_eth.did", + "package": "ic_eth" + }, + "internet_identity": { + "type": "custom", + "candid": "https://github.com/dfinity/internet-identity/releases/latest/download/internet_identity.did", + "wasm": "https://github.com/dfinity/internet-identity/releases/latest/download/internet_identity_dev.wasm.gz", + "shrink": false, + "remote": { + "candid": "internet_identity.did", + "id": { + "ic": "rdmx6-jaaaa-aaaaa-aaadq-cai" + } + } + } + }, + "defaults": { + "build": { + "args": "-fshared-code --actor-idl .dfx/local/lsp", + "packtool": "mops sources" + } + }, + "output_env_file": ".env", + "version": 1, + "networks": { + "local": { + "bind": "localhost:8000", + "type": "ephemeral" + } + } +} \ No newline at end of file diff --git a/e2e/assets/wrong_ids/mops.toml b/e2e/assets/wrong_ids/mops.toml new file mode 100644 index 0000000000..6467871289 --- /dev/null +++ b/e2e/assets/wrong_ids/mops.toml @@ -0,0 +1,27 @@ +[dependencies] +base = "0.11.1" +candb = "https://github.com/vporton/CanDB#no-blob" +nacdb = "0.10.20" +stable-rbtree = "1.0.0" +stable-buffer = "https://github.com/canscale/StableBuffer#v0.2.0" +stablebuffer = "https://github.com/canscale/StableBuffer#v0.2.0" +StableBuffer = "https://github.com/canscale/StableBuffer#v0.2.0" +icrc1 = "0.0.2" +itertools = "0.1.2" +StableTrieMap = "https://github.com/NatLabs/StableTrieMap#main" +array = "https://github.com/aviate-labs/array.mo#main" +xtendedNumbers = "https://github.com/edjCase/motoko_numbers#v1.1.0" +motoko-lib = "https://github.com/research-ag/motoko-lib#0.7" +map7 = "https://github.com/ZhenyaUsenko/motoko-hash-map#v7.0.0" +sha = "https://github.com/enzoh/motoko-sha#master" +stableheapbtreemap = "1.3.0" +map = "9.0.1" +btree = "https://github.com/canscale/StableHeapBTreeMap#v0.3.3" +CanDBMulti = 
"https://github.com/vporton/CanDBMulti#v0.10.0@86d00711216349ba4879370d6313d44a26d16f61" +xtended-numbers = "0.2.1" +"json.mo" = "https://github.com/aviate-labs/json.mo#master@afd30ed75095cb339c1ec187d61030bbd2e59ae6" +nacdb-reorder = "2.0.8" +passport-client = "0.4.6" + +[dev-dependencies] +matchers = "https://github.com/kritzcreek/motoko-matchers#v1.3.0" diff --git a/e2e/assets/wrong_ids/src/backend/lib.mo b/e2e/assets/wrong_ids/src/backend/lib.mo new file mode 100644 index 0000000000..bf8bdb9613 --- /dev/null +++ b/e2e/assets/wrong_ids/src/backend/lib.mo @@ -0,0 +1,724 @@ +import xNat "mo:xtendedNumbers/NatX"; +import Entity "mo:candb/Entity"; +import Text "mo:base/Text"; +import Debug "mo:base/Debug"; +import Buffer "mo:base/Buffer"; +import Principal "mo:base/Principal"; +import Int "mo:base/Int"; +import Nat32 "mo:base/Nat32"; +import Nat8 "mo:base/Nat8"; +import Blob "mo:base/Blob"; +import Char "mo:base/Char"; +import Nat64 "mo:base/Nat64"; +import Array "mo:base/Array"; +import Iter "mo:base/Iter"; +import Time "mo:base/Time"; +import Bool "mo:base/Bool"; +import Reorder "mo:nacdb-reorder/Reorder"; + +module { + // let phoneNumberVerificationCanisterId = "gzqxf-kqaaa-aaaak-qakba-cai"; // https://docs.nfid.one/developer/credentials/mobile-phone-number-credential + + // We will use that "-XXX" < "XXX" for any hex number XXX. 
+ + func _toLowerHexDigit(v: Nat): Char { + Char.fromNat32(Nat32.fromNat( + if (v < 10) { + Nat32.toNat(Char.toNat32('0')) + v; + } else { + Nat32.toNat(Char.toNat32('a')) - 10 + v; + } + )); + }; + + func _fromLowerHexDigit(c: Char): Nat { + Nat32.toNat( + if (c <= '9') { + Char.toNat32(c) - Char.toNat32('0'); + } else { + Char.toNat32(c) - (Char.toNat32('a') - 10); + } + ); + }; + + func encodeBlob(g: Blob): Text { + var result = ""; + for (b in g.vals()) { + let b2 = Nat8.toNat(b); + result #= Text.fromChar(_toLowerHexDigit(b2 / 16)) # Text.fromChar(_toLowerHexDigit(b2 % 16)); + }; + result; + }; + + func decodeBlob(t: Text): Blob { + let buf = Buffer.Buffer(t.size() / 2); + let c = t.chars(); + label r loop { + let ?upper = c.next() else { + break r; + }; + let ?lower = c.next() else { + Debug.trap("decodeBlob: wrong hex number"); + }; + let b = Nat8.fromNat(_fromLowerHexDigit(upper) * 16 + _fromLowerHexDigit(lower)); + buf.add(b); + }; + Blob.fromArray(Buffer.toArray(buf)); + }; + + public func encodeNat(n: Nat): Text { + var n64 = Nat64.fromNat(n); + let buf = Buffer.Buffer(8); + for (i in Iter.range(0, 7)) { + buf.add(Nat8.fromNat(Nat64.toNat(n64 % 256))); + n64 >>= 8; + }; + let blob = Blob.fromArray(Array.reverse(Buffer.toArray(buf))); + encodeBlob(blob); + }; + + public func decodeNat(t: Text): Nat { + let blob = decodeBlob(t); + var result: Nat64 = 0; + for (b in blob.vals()) { + result <<= 8; + result += xNat.from8To64(b); + }; + Nat64.toNat(result); + }; + + // For integers less than 2**64 have the same lexigraphical sort order as the argument. 
+ public func encodeInt(n: Int): Text { + assert n < 2**64; + if (n >= 0) { + encodeNat(Int.abs(n)); + } else { + "-" # encodeNat(2**64 - Int.abs(n)); + }; + }; + + public func decodeInt(t: Text): Int { + let iter = t.chars(); + if (iter.next() == ?'-') { + -(2**64 - decodeNat(Text.fromIter(iter))); + } else { + decodeNat(t); + } + }; + + let ITEM_TYPE_LINK = 0; + let ITEM_TYPE_MESSAGE = 1; + let ITEM_TYPE_POST = 2; + let ITEM_TYPE_FOLDER = 3; + + public type ItemDetails = { + #link : Text; + #message : (); + #post : (); // save post text separately + #folder : (); + }; + + public type ItemDataWithoutOwner = { + price: Float; + locale: Text; + title: Text; + description: Text; + details: ItemDetails; + }; + + // TODO: Add `license` field? + // TODO: Images. + // TODO: Item version. + // FIXME: Checking whether it's a folder in `order.mo` does not work. + /// The type of stored item in DB. + /// + /// Stored at `"i/" # ID` attribute `"i"`. + public type Item = { + #owned : ItemData; + #communal : { + isFolder: Bool; + timeStream: Reorder.Order; + votesStream: Reorder.Order; + }; + }; + + public func isFolder(item: Item): Bool { + switch (item) { + case (#owned data) { data.item.details == #folder }; + case (#communal data) { data.isFolder }; + }; + }; + + /// Used when requesting item data. + public type ItemTransfer = { + data: ItemData; + communal: Bool; + }; + + /// Used when creating new items. + public type ItemTransferWithoutOwner = { + data: ItemDataWithoutOwner; + communal: Bool; + }; + + public type ItemData = { + creator: Principal; + item: ItemDataWithoutOwner; + edited: Bool; + }; + + /// One of voted-for variants of a communal item. + /// + /// Stored at `"r/" # ID` attribute `"i"`. + /// + /// TODO: Use it. + public type ItemVariant = { + item: ItemDataWithoutOwner; + // itemRef: (Principal, Entity.SK); + }; + + // TODO: Does it make sense to keep `Streams` in lib? 
+ public type StreamsLinks = Nat; + public let STREAM_LINK_SUBITEMS: StreamsLinks = 0; // folder <-> sub-items + public let STREAM_LINK_SUBFOLDERS: StreamsLinks = 1; // folder <-> sub-folders + public let STREAM_LINK_COMMENTS: StreamsLinks = 2; // item <-> comments + public let STREAM_LINK_MAX: StreamsLinks = STREAM_LINK_COMMENTS; + + public type Streams = [?Reorder.Order]; + + // TODO: messy order of the below functions + + func serializeItemDataWithoutOwnerToBuffer( + buf: Buffer.Buffer, + item: ItemDataWithoutOwner, + ) { + buf.add(#int (switch (item.details) { + case (#link v) { ITEM_TYPE_LINK }; + case (#message) { ITEM_TYPE_MESSAGE }; + case (#post _) { ITEM_TYPE_POST }; + case (#folder) { ITEM_TYPE_FOLDER }; + })); + buf.add(#float(item.price)); + buf.add(#text(item.locale)); + buf.add(#text(item.title)); + buf.add(#text(item.description)); + switch (item.details) { + case (#link v) { + buf.add(#text v); + }; + case _ {}; + }; + }; + + func deserializeItemDataWithoutOwnerFromBuffer(arr: [Entity.AttributeValuePrimitive], current: {var pos: Nat}) + : ItemDataWithoutOwner + { + var kind: Nat = 0; + var price = 0.0; + var locale = ""; + var title = ""; + var description = ""; + var link = ""; + + let res = label r: Bool { + switch (arr[current.pos]) { + case (#int v) { + kind := Int.abs(v); + current.pos += 1; + }; + case _ { break r false }; + }; + switch (arr[current.pos]) { + case (#float v) { + price := v; + current.pos += 1; + }; + case _ { break r false; }; + }; + switch (arr[current.pos]) { + case (#text v) { + locale := v; + current.pos += 1; + }; + case _ { break r false; }; + }; + switch (arr[current.pos]) { + case (#text v) { + title := v; + current.pos += 1; + }; + case _ { break r false; }; + }; + switch (arr[current.pos]) { + case (#text v) { + description := v; + current.pos += 1; + }; + case _ { break r false; } + }; + if (kind == ITEM_TYPE_LINK) { // TODO: Place it directly after `kind`? 
+ switch (arr[current.pos]) { + case (#text v) { + link := v; + current.pos += 1; + }; + case _ { break r false; }; + }; + }; + + true; + }; + + if (not res) { + Debug.trap("wrong item format"); + }; + { + price = price; + locale = locale; + title = title; + description = description; + details = switch (kind) { + case (0) { #link link }; + case (1) { #message }; + case (2) { #post }; + case (3) { #folder }; + case _ { Debug.trap("wrong item format"); } + }; + }; + }; + + func serializeItemToBuffer( + buf: Buffer.Buffer, + item: Item, + ) { + switch (item) { + case (#owned ownedItem) { + buf.add(#int 0); + buf.add(#text(Principal.toText(ownedItem.creator))); + buf.add(#bool(ownedItem.edited)); + serializeItemDataWithoutOwnerToBuffer(buf, ownedItem.item); + }; + case (#communal {isFolder: Bool; timeStream: Reorder.Order; votesStream: Reorder.Order}) { + buf.add(#int 1); + buf.add(#int(if (isFolder) { 1 } else { 0 })); + buf.add(#text(Principal.toText(Principal.fromActor(timeStream.order.0)))); + buf.add(#int(timeStream.order.1)); + buf.add(#text(Principal.toText(Principal.fromActor(timeStream.reverse.0)))); + buf.add(#int(timeStream.reverse.1)); + buf.add(#text(Principal.toText(Principal.fromActor(votesStream.order.0)))); + buf.add(#int(votesStream.order.1)); + buf.add(#text(Principal.toText(Principal.fromActor(votesStream.reverse.0)))); + buf.add(#int(votesStream.reverse.1)); + }; + }; + }; + + func deserializeItemFromBuffer(arr: [Entity.AttributeValuePrimitive], current: {var pos: Nat}) + : Item + { + label r { + if (arr[current.pos] != #int 0) { // version marker + break r; + }; + current.pos += 1; + switch (arr[current.pos]) { + case (#int v) { + current.pos += 1; + switch (v) { + case (0) { + var creator = ""; + var edited = false; + switch (arr[current.pos]) { + case (#text c) { + creator := c; + current.pos += 1; + }; + case _ { break r }; + }; + switch (arr[current.pos]) { + case (#bool f) { + edited := f; + current.pos += 1; + }; + case _ { break r }; + }; 
+ return #owned({ + creator = Principal.fromText(creator); + item = deserializeItemDataWithoutOwnerFromBuffer(arr, {var pos = current.pos}); + edited; + }); + }; + case (1) { + var folder = false; + switch (arr[current.pos]) { + case (#int 0) { folder := false; }; + case (#int 1) { folder := true; }; + case _ { break r }; + }; + current.pos += 1; + var order = ("", 0); + var reverse = ("", 0); + var order2 = ("", 0); + var reverse2 = ("", 0); + switch ((arr[current.pos], arr[current.pos+1], arr[current.pos+2], arr[current.pos+3])) { + case ((#text op1, #int on1, #text op2, #int on2)) { + order := (op1, Int.abs(on1)); + reverse := (op2, Int.abs(on2)); + current.pos += 4; + }; + case _ { break r }; + }; + switch ((arr[current.pos], arr[current.pos+1], arr[current.pos+2], arr[current.pos+3])) { + case ((#text op1, #int on1, #text op2, #int on2)) { + order2 := (op1, Int.abs(on1)); + reverse2 := (op2, Int.abs(on2)); + current.pos += 4; + }; + case _ { break r }; + }; + return #communal { + isFolder = folder; + timeStream = { order = (actor(order.0), order.1); reverse = (actor(reverse.0), reverse.1) }; + votesStream = { order = (actor(order2.0), order2.1); reverse = (actor(reverse2.0), reverse2.1) }; + }; + }; + case _ { break r }; + } + }; + case _ { break r }; + }; + }; + Debug.trap("wrong item format"); + }; + + // FIXME: Should be `Item` instead of `ItemData`. 
+ public func serializeItem(item: Item): Entity.AttributeValue { + let buf = Buffer.Buffer(8); + buf.add(#int 0); // version + serializeItemToBuffer(buf, item); + #tuple(Buffer.toArray(buf)); + }; + + public func deserializeItem(attr: Entity.AttributeValue): Item { + var pos = 0; + label r switch (attr) { + case (#tuple arr) { + return deserializeItemFromBuffer(arr, {var pos}); + }; + case _ {}; + }; + Debug.trap("wrong item format"); + }; + + public func serializeItemVariant(item: ItemVariant): Entity.AttributeValue { + let buf = Buffer.Buffer(8); + buf.add(#int 0); // version + serializeItemDataWithoutOwnerToBuffer(buf, item.item); + #tuple(Buffer.toArray(buf)); + }; + + public func deserializeItemVariant(attr: Entity.AttributeValue): ItemVariant { + var pos = 0; + switch (attr) { + case (#tuple arr) { + switch (arr[pos]) { + case (#int v) { + assert v == 0; + pos += 1; + }; + case _ { Debug.trap("wrong item format"); }; + }; + return { + item = deserializeItemDataWithoutOwnerFromBuffer(arr, {var pos}); + }; + }; + case _ { + Debug.trap("wrong item format"); + }; + }; + }; + + public func serializeStreams(streams: Streams): Entity.AttributeValue { + let buf = Buffer.Buffer(18); + for(item in streams.vals()) { + switch (item) { + case (?r) { + buf.add(#text(Principal.toText(Principal.fromActor(r.order.0)))); + buf.add(#int(r.order.1)); + buf.add(#text(Principal.toText(Principal.fromActor(r.reverse.0)))); + buf.add(#int(r.reverse.1)); + }; + case null { + buf.add(#int(-1)); + } + } + }; + #tuple(Buffer.toArray(buf)); + }; + + public func deserializeStreams(attr: Entity.AttributeValue): Streams { + let s = Buffer.Buffer(36); + let #tuple arr = attr else { + Debug.trap("programming error"); + }; + var i = 0; + label w while (i != Array.size(arr)) { + if (arr[i] == #int(-1)) { + s.add(null); + i += 1; + continue w; + }; + switch (arr[i], arr[i+1], arr[i+2], arr[i+3]) { + case (#text c0, #int i0, #text c1, #int i1) { + i += 4; + s.add( + ?{ order = (actor(c0), 
Int.abs(i0)); reverse = (actor(c1), Int.abs(i1)) }, + ); + }; + case _ { + Debug.trap("programming error"); + } + }; + }; + + Buffer.toArray(s); + }; + + // TODO: Use this. + public type Karma = { + earnedVotes: Nat; + remainingBonusVotes: Nat; + lastBonusUpdated: Time.Time; + }; + + public func serializeKarma(karma: Karma): Entity.AttributeValue { + #tuple([ + #int(0), // version + #int(karma.earnedVotes), + #int(karma.remainingBonusVotes), + #int(karma.lastBonusUpdated), + ]); + }; + + public func deserializeKarma(attr: Entity.AttributeValue): Karma { + let res = label r { + switch (attr) { + case (#tuple arr) { + let a: [var Nat] = Array.tabulateVar(3, func _ = 0); + switch (arr[0]) { + case (#int v) { + assert v == 0; + }; + case _ { Debug.trap("Wrong karma version"); }; + }; + for (i in Iter.range(0,2)) { + switch (arr[i+1]) { + case (#int elt) { + a[i] := Int.abs(elt); + }; + case _ { break r; }; + }; + return { + earnedVotes = a[0]; + remainingBonusVotes = a[1]; + lastBonusUpdated = a[2]; + }; + }; + }; + case _ { break r; }; + }; + }; + Debug.trap("wrong votes format"); + }; + + public func onlyItemOwner(caller: Principal, item: Item) { + switch (item) { + case (#owned data) { + if (caller != data.creator) { + Debug.trap("can't change item owner"); + }; + }; + case (#communal _) { + Debug.trap("can't directly edit a communal item"); + }; + }; + }; + + /// More user info: Voting /// + + // TODO: Also store, how much votings were done. 
+ public type VotingScore = { + points: Float; // Gitcoin score + lastChecked: Time.Time; + ethereumAddress: Text; // TODO: Store in binary + }; + + public func serializeVoting(voting: VotingScore): Entity.AttributeValue { + let buf = Buffer.Buffer(4); + buf.add(#int 0); // version + buf.add(#bool true); + buf.add(#float(voting.points)); + buf.add(#int(voting.lastChecked)); + buf.add(#text(voting.ethereumAddress)); + #tuple(Buffer.toArray(buf)); + }; + + public func deserializeVoting(attr: Entity.AttributeValue): VotingScore { + var isScore: Bool = false; + var points: Float = 0.0; + var lastChecked: Time.Time = 0; + var ethereumAddress: Text = ""; + + let res = label r: Bool switch (attr) { + case (#tuple arr) { + var pos: Nat = 0; + switch (arr[pos]) { + case (#int v) { + assert v == 0; + }; + case _ { break r false }; + }; + pos += 1; + switch (arr[pos]) { + case (#bool v) { + isScore := v; + }; + case _ { break r false }; + }; + pos += 1; + if (isScore) { + switch (arr[pos]) { + case (#float v) { + points := v; + }; + case _ { break r false }; + }; + pos += 1; + switch (arr[pos]) { + case (#int v) { + lastChecked := v; + }; + case _ { break r false }; + }; + pos += 1; + switch (arr[pos]) { + case (#text v) { + ethereumAddress := v; + }; + case _ { break r false }; + }; + pos += 1; + }; + true; + }; + case _ { break r false }; + }; + if (not res) { + Debug.trap("cannot deserialize Voting"); + }; + {points; lastChecked; ethereumAddress}; + }; + + /// Users /// + + public type User = { + locale: Text; + nick: Text; + title: Text; + description: Text; + // TODO: long description + link : Text; + }; + + public func serializeUser(user: User): Entity.AttributeValue { + var buf = Buffer.Buffer(6); + buf.add(#int 0); // version + buf.add(#text (user.locale)); + buf.add(#text (user.nick)); + buf.add(#text (user.title)); + buf.add(#text (user.description)); + buf.add(#text (user.link)); + #tuple (Buffer.toArray(buf)); + }; + + public func deserializeUser(attr: 
Entity.AttributeValue): User { + var locale = ""; + var nick = ""; + var title = ""; + var description = ""; + var link = ""; + let res = label r: Bool switch (attr) { + case (#tuple arr) { + var pos = 0; + while (pos < arr.size()) { + switch (pos) { + case (0) { + switch (arr[pos]) { + case (#int v) { + assert v == 0; // version + }; + case _ { break r false }; + }; + }; + case (1) { + switch (arr[pos]) { + case (#text v) { + locale := v; + }; + case _ { break r false }; + }; + }; + case (2) { + switch (arr[pos]) { + case (#text v) { + nick := v; + }; + case _ { break r false }; + }; + }; + case (3) { + switch (arr[pos]) { + case (#text v) { + title := v; + }; + case _ { break r false }; + }; + }; + case (4) { + switch (arr[pos]) { + case (#text v) { + description := v; + }; + case _ { break r false }; + }; + }; + case (5) { + switch (arr[pos]) { + case (#text v) { + link := v; + }; + case _ { break r false }; + }; + }; + case _ { break r false; }; + }; + pos += 1; + }; + true; + }; + case _ { + false; + }; + }; + if (not res) { + Debug.trap("wrong user format"); + }; + { + locale = locale; + nick = nick; + title = title; + description = description; + link = link; + }; + }; +} \ No newline at end of file diff --git a/e2e/assets/wrong_ids/src/backend/main.mo b/e2e/assets/wrong_ids/src/backend/main.mo new file mode 100644 index 0000000000..92f70d25cf --- /dev/null +++ b/e2e/assets/wrong_ids/src/backend/main.mo @@ -0,0 +1,255 @@ +import Nac "mo:nacdb/NacDB"; +import Principal "mo:base/Principal"; +import Debug "mo:base/Debug"; +import Text "mo:base/Text"; +import Nat "mo:base/Nat"; +import Buffer "mo:base/Buffer"; +import Array "mo:base/Array"; +import Reorder "mo:nacdb-reorder/Reorder"; +import order "canister:order"; +import GUID "mo:nacdb/GUID"; +import Entity "mo:candb/Entity"; +import Itertools "mo:itertools/Iter"; + +import CanDBIndex "canister:CanDBIndex"; +import NacDBIndex "canister:NacDBIndex"; +import CanDBPartition "../storage/CanDBPartition"; +import 
MyCycles "mo:nacdb/Cycles"; +import DBConfig "../libs/configs/db.config"; +import lib "lib"; +// import ICRC1Types "mo:icrc1/ICRC1/Types"; + +shared actor class ZonBackend() = this { + /// External Canisters /// + + /// Some Global Variables /// + stable let guidGen = GUID.init(Array.tabulate(16, func _ = 0)); // FIXME: Gather randomness. + + stable let orderer = Reorder.createOrderer({queueLengths = 20}); // TODO: What's the number? + + // See ARCHITECTURE.md for database structure + + // TODO: Avoid duplicate user nick names. + + stable var maxId: Nat = 0; + + stable var founder: ?Principal = null; + + /// Initialization /// + + stable var initialized: Bool = false; + + public shared({ caller }) func init(): async () { + ignore MyCycles.topUpCycles(DBConfig.dbOptions.partitionCycles); + + if (initialized) { + Debug.trap("already initialized"); + }; + + founder := ?caller; + + initialized := true; + }; + + /// Owners /// + + func onlyMainOwner(caller: Principal) { + if (?caller != founder) { + Debug.trap("not the main owner"); + } + }; + + public shared({caller}) func setMainOwner(_founder: Principal) { + onlyMainOwner(caller); + + founder := ?_founder; + }; + + // TODO: probably, superfluous. + public shared({caller}) func removeMainOwner() { + onlyMainOwner(caller); + + founder := null; + }; + + public shared({caller}) func setUserData(partitionId: ?Principal, user: lib.User) { + let key = "u/" # Principal.toText(caller); // TODO: Should use binary encoding. + // TODO: Add Hint to CanDBMulti + ignore await CanDBIndex.putAttributeNoDuplicates("user", { + sk = key; + key = "u"; + value = lib.serializeUser(user); + }, + ); + }; + + // TODO: Should also remove all his/her items? 
+ public shared({caller}) func removeUser(canisterId: Principal) { + var db: CanDBPartition.CanDBPartition = actor(Principal.toText(canisterId)); + let key = "u/" # Principal.toText(caller); + await db.delete({sk = key}); + }; + + /// Items /// + + stable var rootItem: ?(CanDBPartition.CanDBPartition, Nat) = null; + + public shared({caller}) func setRootItem(part: Principal, id: Nat) + : async () + { + onlyMainOwner(caller); + + rootItem := ?(actor(Principal.toText(part)), id); + }; + + public query func getRootItem(): async ?(Principal, Nat) { + do ? { + let (part, n) = rootItem!; + (Principal.fromActor(part), n); + }; + }; + + public shared({caller}) func createItemData(item: lib.ItemTransferWithoutOwner) + : async (Principal, Nat) + { + let (canisterId, itemId) = if (item.communal) { + let variant: lib.ItemVariant = { creator = caller; item = item.data; }; + let variantId = maxId; + maxId += 1; + let variantKey = "r/" # Nat.toText(variantId); + let variantCanisterId = await CanDBIndex.putAttributeWithPossibleDuplicate( + "main", { sk = variantKey; key = "i"; value = lib.serializeItemVariant(variant) } + ); + let itemId = maxId; + maxId += 1; + let itemKey = "i/" # Nat.toText(itemId); + let timeStream = await* Reorder.createOrder(GUID.nextGuid(guidGen), NacDBIndex, orderer, ?10000); // FIXME: max length + let votesStream = await* Reorder.createOrder(GUID.nextGuid(guidGen), NacDBIndex, orderer, ?10000); // FIXME: max length + let item2 = #communal { timeStream; votesStream; isFolder = item.data.details == #folder }; + let variantValue = Nat.toText(variantId) # "@" # Principal.toText(variantCanisterId); + await* Reorder.add(GUID.nextGuid(guidGen), NacDBIndex, orderer, { + hardCap = ?100; key = -2; order = votesStream; value = variantValue; // TODO: Take position `key` configurable. 
+ }); + + // Put variant in time stream // TODO: duplicate code + let scanResult = await timeStream.order.0.scanLimitOuter({ + dir = #fwd; + outerKey = timeStream.order.1; + lowerBound = ""; + upperBound = "x"; + limit = 1; + ascending = ?true; + }); + let timeScanSK = if (scanResult.results.size() == 0) { // empty list + 0; + } else { + let t = scanResult.results[0].0; + let n = lib.decodeInt(Text.fromIter(Itertools.takeWhile(t.chars(), func (c: Char): Bool { c != '#' }))); + n - 1; + }; + let guid = GUID.nextGuid(guidGen); + // TODO: race condition + await* Reorder.add(guid, NacDBIndex, orderer, { + order = timeStream; + key = timeScanSK; + value = variantValue; + hardCap = DBConfig.dbOptions.hardCap; + }); + + let itemCanisterId = await CanDBIndex.putAttributeWithPossibleDuplicate( + "main", { sk = itemKey; key = "i"; value = lib.serializeItem(item2) } + ); + (itemCanisterId, itemId); + } else { + let item2: lib.Item = #owned { creator = caller; item = item.data; edited = false }; + let itemId = maxId; + maxId += 1; + let key = "i/" # Nat.toText(itemId); + let canisterId = await CanDBIndex.putAttributeWithPossibleDuplicate( + "main", { sk = key; key = "i"; value = lib.serializeItem(item2) } + ); + (canisterId, itemId); + }; + + await order.insertIntoAllTimeStream((canisterId, itemId)); + (canisterId, itemId); + }; + + // We don't check that owner exists: If a user lost his/her item, that's his/her problem, not ours. 
+ public shared({caller}) func setItemData(canisterId: Principal, _itemId: Nat, item: lib.ItemDataWithoutOwner) { + var db: CanDBPartition.CanDBPartition = actor(Principal.toText(canisterId)); + let key = "i/" # Nat.toText(_itemId); // TODO: better encoding + switch (await db.getAttribute({sk = key}, "i")) { + case (?oldItemRepr) { + let oldItem = lib.deserializeItem(oldItemRepr); + let item2: lib.ItemData = { item = item; creator = caller; edited = true }; // TODO: edited only if actually changed + lib.onlyItemOwner(caller, oldItem); // also rejects changing communal items. + await db.putAttribute({sk = key; key = "i"; value = lib.serializeItem(#owned item2)}); + }; + case null { Debug.trap("no item") }; + }; + }; + + public shared({caller}) func setPostText(canisterId: Principal, _itemId: Nat, text: Text) { + var db: CanDBPartition.CanDBPartition = actor(Principal.toText(canisterId)); + let key = "i/" # Nat.toText(_itemId); // TODO: better encoding + switch (await db.getAttribute({sk = key}, "i")) { + case (?oldItemRepr) { + let oldItem = lib.deserializeItem(oldItemRepr); + lib.onlyItemOwner(caller, oldItem); + switch (oldItem) { + case (#owned data) { + switch (data.item.details) { + case (#post) {}; + case _ { Debug.trap("not a post"); }; + }; + }; + case (#communal _) { Debug.trap("programming error") }; + }; + await db.putAttribute({ sk = key; key = "t"; value = #text(text) }); + }; + case _ { Debug.trap("no item") }; + }; + }; + + // TODO: Also remove voting data. + public shared({caller}) func removeItem(canisterId: Principal, _itemId: Nat) { + // We first remove links, then the item itself, in order to avoid race conditions when displaying. 
+ await order.removeItemLinks((canisterId, _itemId)); + var db: CanDBPartition.CanDBPartition = actor(Principal.toText(canisterId)); + let key = "i/" # Nat.toText(_itemId); + let ?oldItemRepr = await db.getAttribute({sk = key}, "i") else { + Debug.trap("no item"); + }; + let oldItem = lib.deserializeItem(oldItemRepr); + // if (oldItem.item.communal) { // FIXME + // Debug.trap("it's communal"); + // }; + lib.onlyItemOwner(caller, oldItem); + await db.delete({sk = key}); + }; + + // TODO: Set maximum lengths on user nick, chirp length, etc. + + /// Affiliates /// + + // public shared({caller}) func setAffiliate(canister: Principal, buyerAffiliate: ?Principal, sellerAffiliate: ?Principal): async () { + // var db: CanDBPartition.CanDBPartition = actor(Principal.toText(canister)); + // if (buyerAffiliate == null and sellerAffiliate == null) { + // await db.delete({sk = "a/" # Principal.toText(caller)}); + // }; + // let buyerAffiliateStr = switch (buyerAffiliate) { + // case (?user) { Principal.toText(user) }; + // case (null) { "" } + // }; + // let sellerAffiliateStr = switch (sellerAffiliate) { + // case (?user) { Principal.toText(user) }; + // case (null) { "" } + // }; + // // await db.put({sk = "a/" # Principal.toText(caller); attributes = [("v", #text (buyerAffiliateStr # "/" # sellerAffiliateStr))]}); + // }; + + public shared func get_trusted_origins(): async [Text] { + return []; + }; +} diff --git a/e2e/assets/wrong_ids/src/backend/order.mo b/e2e/assets/wrong_ids/src/backend/order.mo new file mode 100644 index 0000000000..8935ca6b2c --- /dev/null +++ b/e2e/assets/wrong_ids/src/backend/order.mo @@ -0,0 +1,522 @@ +import Debug "mo:base/Debug"; +import Nat "mo:base/Nat"; +import Char "mo:base/Char"; +import Text "mo:base/Text"; +import Nat8 "mo:base/Nat8"; +import Principal "mo:base/Principal"; +import Int "mo:base/Int"; +import Array "mo:base/Array"; +import Bool "mo:base/Bool"; +import Float "mo:base/Float"; +import V "mo:passport-client/lib/Verifier"; +import 
PCB "mo:passport-client/backend"; + +import Itertools "mo:itertools/Iter"; +import Nac "mo:nacdb/NacDB"; +import GUID "mo:nacdb/GUID"; +import CanDBIndex "canister:CanDBIndex"; +import CanDBPartition "../storage/CanDBPartition"; +import NacDBIndex "canister:NacDBIndex"; +import Reorder "mo:nacdb-reorder/Reorder"; +import MyCycles "mo:nacdb/Cycles"; +import lib "lib"; +import DBConfig "../libs/configs/db.config"; + +// TODO: Delete "hanging" items (as soon as they are found) + +shared({caller = initialOwner}) actor class Orders() = this { + stable var owners = [initialOwner]; + + func checkCaller(caller: Principal) { + if (Array.find(owners, func(e: Principal): Bool { e == caller; }) == null) { + Debug.trap("order: not allowed"); + } + }; + + public shared({caller = caller}) func setOwners(_owners: [Principal]): async () { + checkCaller(caller); + + owners := _owners; + }; + + public query func getOwners(): async [Principal] { owners }; + + stable var initialized: Bool = false; + + // stable var rng: Prng.Seiran128 = Prng.Seiran128(); // WARNING: This is not a cryptographically secure pseudorandom number generator. + stable let guidGen = GUID.init(Array.tabulate(16, func _ = 0)); // FIXME: Gather randomness. + + stable let orderer = Reorder.createOrderer({queueLengths = 20}); // TODO: What's the number? + + public shared({ caller }) func init(_owners: [Principal]): async () { + checkCaller(caller); + ignore MyCycles.topUpCycles(DBConfig.dbOptions.partitionCycles); // TODO: another number of cycles?
+ if (initialized) { + Debug.trap("already initialized"); + }; + + owners := _owners; + MyCycles.addPart(DBConfig.dbOptions.partitionCycles); + + initialized := true; + }; + + func addItemToList(theSubDB: Reorder.Order, itemToAdd: (Principal, Nat), side: { #beginning; #end; #zero }): async* () { + let scanItemInfo = Nat.toText(itemToAdd.1) # "@" # Principal.toText(itemToAdd.0); + let theSubDB2: Nac.OuterCanister = theSubDB.order.0; + if (await theSubDB2.hasByOuter({outerKey = theSubDB.reverse.1; sk = scanItemInfo})) { + return; // prevent duplicate + }; + // TODO: race + + // TODO: duplicate code + + let timeScanSK = if (side == #zero) { + 0; + } else { + let scanResult = await theSubDB2.scanLimitOuter({ + dir = if (side == #end) { #bwd } else { #fwd }; + outerKey = theSubDB.order.1; + lowerBound = ""; + upperBound = "x"; + limit = 1; + ascending = ?(if (side == #end) { false } else { true }); + }); + let timeScanSK = if (scanResult.results.size() == 0) { // empty list + 0; + } else { + let t = scanResult.results[0].0; + let n = lib.decodeInt(Text.fromIter(Itertools.takeWhile(t.chars(), func (c: Char): Bool { c != '#' }))); + if (side == #end) { n + 1 } else { n - 1 }; + }; + timeScanSK; + }; + + let guid = GUID.nextGuid(guidGen); + + // TODO: race condition + await* Reorder.add(guid, NacDBIndex, orderer, { + order = theSubDB; + key = timeScanSK; + value = scanItemInfo; + hardCap = DBConfig.dbOptions.hardCap; + }); + }; + + // Public API // + + public shared({caller}) func addItemToFolder( + catId: (Principal, Nat), + itemId: (Principal, Nat), + comment: Bool, + side: { #beginning; #end }, // ignored unless adding to an owned folder + ): async () { + let catId1: CanDBPartition.CanDBPartition = actor(Principal.toText(catId.0)); + let itemId1: CanDBPartition.CanDBPartition = actor(Principal.toText(itemId.0)); + + // TODO: Race condition when adding an item. + // TODO: Ensure that it is retrieved once. 
+ let ?folderItemData = await catId1.getAttribute({sk = "i/" # Nat.toText(catId.1)}, "i") else { + Debug.trap("cannot get folder item"); + }; + let folderItem = lib.deserializeItem(folderItemData); + + // if (not folderItem.item.communal) { // FIXME + // lib.onlyItemOwner(caller, folderItem); + // }; + if (not lib.isFolder(folderItem) and not comment) { + Debug.trap("not a folder"); + }; + let links = await* getStreamLinks(itemId, comment); + await* addToStreams(catId, itemId, comment, links, itemId1, "st", "rst", #beginning); + if (lib.isFolder(folderItem)) { + await* addToStreams(catId, itemId, comment, links, itemId1, "sv", "rsv", side); + } else { + await* addToStreams(catId, itemId, comment, links, itemId1, "sv", "rsv", #end); + }; + }; + + /// `key1` and `key2` are like `"st"` and `"rst"` + func addToStreams( + catId: (Principal, Nat), + itemId: (Principal, Nat), + comment: Bool, // FIXME: Use it. + links: lib.StreamsLinks, + itemId1: CanDBPartition.CanDBPartition, + key1: Text, + key2: Text, + side: { #beginning; #end; #zero }, + ): async* () { + // Put into the beginning of time order. 
+ let streams1 = await* itemsStream(catId, key1); + let streams2 = await* itemsStream(itemId, key2); + let streamsVar1: [var ?Reorder.Order] = switch (streams1) { + case (?streams) { Array.thaw(streams) }; + case null { [var null, null, null]}; + }; + let streamsVar2: [var ?Reorder.Order] = switch (streams2) { + case (?streams) { Array.thaw(streams) }; + case null { [var null, null, null]}; + }; + let streams1t = switch (streams1) { + case (?t) { t[links] }; + case (null) { null }; + }; + let stream1 = switch (streams1t) { + case (?stream) { stream }; + case null { + let v = await* Reorder.createOrder(GUID.nextGuid(guidGen), NacDBIndex, orderer, ?10000); + streamsVar1[links] := ?v; + v; + }; + }; + let streams2t = switch (streams2) { + case (?t) { t[links] }; + case (null) { null }; + }; + let stream2 = switch (streams2t) { + case (?stream) { stream }; + case null { + let v = await* Reorder.createOrder(GUID.nextGuid(guidGen), NacDBIndex, orderer, ?10000); + streamsVar2[links] := ?v; + v; + }; + }; + await* addItemToList(stream1, itemId, side); + await* addItemToList(stream2, catId, side); + let itemData1 = lib.serializeStreams(Array.freeze(streamsVar1)); + let itemData2 = lib.serializeStreams(Array.freeze(streamsVar2)); + await itemId1.putAttribute({ sk = "i/" # Nat.toText(catId.1); key = key1; value = itemData1 }); + await itemId1.putAttribute({ sk = "i/" # Nat.toText(itemId.1); key = key2; value = itemData2 }); + }; + + public shared({caller}) func removeItemLinks(itemId: (Principal, Nat)): async () { + // checkCaller(caller); // FIXME: Uncomment. + await* _removeItemLinks(itemId); + }; + + func _removeItemLinks(itemId: (Principal, Nat)): async* () { + // FIXME: Also delete the other end. 
+ await* _removeStream("st", itemId); + await* _removeStream("sv", itemId); + await* _removeStream("rst", itemId); + await* _removeStream("rsv", itemId); + // await* _removeStream("stc", itemId); + // await* _removeStream("vsc", itemId); + // await* _removeStream("rstc", itemId); + // await* _removeStream("rsvc", itemId); + + }; + + /// Removes a stream + /// TODO: Race condition on removing first links in only one direction. Check for more race conditions. + func _removeStream(kind: Text, itemId: (Principal, Nat)): async* () { + let directStream = await* itemsStream(itemId, kind); + switch (directStream) { + case (?directStream) { + for (index in directStream.keys()) { + switch (directStream[index]) { + case (?directOrder) { + let value = Nat.toText(itemId.1) # "@" # Principal.toText(itemId.0); + let reverseKind = if (kind.chars().next() == ?'r') { + let iter = kind.chars(); + ignore iter.next(); + Text.fromIter(iter); + } else { + "r" # kind; + }; + // Delete links pointing to us: + // TODO: If more than 100_000? 
+ let result = await directOrder.order.0.scanLimitOuter({outerKey = directOrder.order.1; lowerBound = ""; upperBound = "x"; dir = #fwd; limit = 100_000}); + for (p in result.results.vals()) { + let #text q = p.1 else { + Debug.trap("order: programming error"); + }; + // TODO: Extract this to a function: + let words = Text.split(q, #char '@'); // a bit inefficient + let w1o = words.next(); + let w2o = words.next(); + let (?w1, ?w2) = (w1o, w2o) else { + Debug.trap("order: programming error"); + }; + let ?w1i = Nat.fromText(w1) else { + Debug.trap("order: programming error"); + }; + let reverseStream = await* itemsStream((Principal.fromText(w2), w1i), reverseKind); + switch (reverseStream) { + case (?reverseStream) { + switch (reverseStream[index]) { + case (?reverseOrder) { + Debug.print("q=" # q # ", parent=" # debug_show(w1i) # "@" # w2 # ", kind=" # reverseKind); + await* Reorder.delete(GUID.nextGuid(guidGen), NacDBIndex, orderer, { order = reverseOrder; value }); + }; + case null {}; + }; + }; + case null {}; + }; + }; + // Delete our own sub-DB (before deleting the item itself): + await directOrder.order.0.deleteSubDBOuter({outerKey = directOrder.order.1}); + }; + case null {}; + } + }; + }; + case null {}; + }; + }; + + func getStreamLinks(/*catId: (Principal, Nat),*/ itemId: (Principal, Nat), comment: Bool) + : async* lib.StreamsLinks + { + // let catId1: CanDBPartition.CanDBPartition = actor(Principal.toText(catId.0)); + let itemId1: CanDBPartition.CanDBPartition = actor(Principal.toText(itemId.0)); + // TODO: Ensure that item data is readed once per `addItemToFolder` call. + let ?childItemData = await itemId1.getAttribute({sk = "i/" # Nat.toText(itemId.1)}, "i") else { + // TODO: Keep doing for other folders after a trap? 
+ Debug.trap("cannot get child item"); + }; + let childItem = lib.deserializeItem(childItemData); + + if (comment) { + lib.STREAM_LINK_COMMENTS; + } else { + if (lib.isFolder(childItem)) { + lib.STREAM_LINK_SUBFOLDERS; + } else { + lib.STREAM_LINK_SUBITEMS; + }; + }; + }; + + /// `key1` and `key2` are like `"st"` and `"rst"` + /// TODO: No need to return an option type + func itemsStream(itemId: (Principal, Nat), key2: Text) + : async* ?lib.Streams + { + let itemId1: CanDBPartition.CanDBPartition = actor(Principal.toText(itemId.0)); + + let streamsData = await itemId1.getAttribute({sk = "i/" # Nat.toText(itemId.1)}, key2); + let streams = switch (streamsData) { + case (?data) { + lib.deserializeStreams(data); + }; + case null { + [null, null, null]; + }; + }; + ?streams; + }; + + /// Voting /// + + /// `amount == 0` means canceling the vote. + public shared({caller}) func vote(parentPrincipal: Principal, parent: Nat, childPrincipal: Principal, child: Nat, value: Int, comment: Bool): async () { + await CanDBIndex.checkSybil(caller); + assert value >= -1 and value <= 1; + + let votingPower = value; + // TODO: Use this: + // let votingPower = Float.toInt(Float.fromInt(value) * PCB.adjustVotingPower(user)); // TODO: `Float.toInt` is a hack. + + let userVotesSK = "v/" # Principal.toText(caller) # "/" # Nat.toText(parent) # "/" # Nat.toText(child); + let oldVotes = await CanDBIndex.getFirstAttribute("user", { sk = userVotesSK; key = "v" }); // TODO: race condition + let (principal, oldValue) = switch (oldVotes) { + case (?oldVotes) { (?oldVotes.0, oldVotes.1) }; + case null { (null, null) }; + }; + let oldValue2 = switch (oldValue) { + case (?v) { + let #int v2 = v else { + Debug.trap("wrong votes"); + }; + v2; + }; + case null { 0 }; + }; + let difference = votingPower - oldValue2; + if (difference == 0) { + return; + }; + // TODO: Take advantage of `principal` as a hint. 
+ ignore await CanDBIndex.putAttributeNoDuplicates("user", { sk = userVotesSK; key = "v"; value = #int votingPower }); + + // Update total votes for the given parent/child: + let totalVotesSK = "w/" # Nat.toText(parent) # "/" # Nat.toText(child); + let oldTotals = await CanDBIndex.getFirstAttribute("user", { sk = totalVotesSK; key = "v" }); // TODO: race condition + let (up, down, oldTotalsPrincipal) = switch (oldTotals) { + case (?(oldTotalsPrincipal, ?(#tuple(a)))) { + let (#int up, #int down) = (a[0], a[1]) else { + Debug.trap("votes programming error") + }; + (up, down, ?oldTotalsPrincipal); + }; + case null { + (0, 0, null); + }; + case _ { + Debug.trap("votes programming error"); + }; + }; + + // TODO: Check this block of code for errors. + let changeUp = (votingPower == 1 and oldValue2 != 1) or (oldValue2 == 1 and votingPower != 1); + let changeDown = (votingPower == -1 and oldValue2 != -1) or (oldValue2 == -1 and votingPower != -1); + var up2 = up; + var down2 = down; + if (changeUp or changeDown) { + if (changeUp) { + up2 += if (difference > 0) { 1 } else { -1 }; + }; + if (changeDown) { + down2 += if (difference > 0) { -1 } else { 1 }; + }; + // TODO: Take advantage of `oldTotalsPrincipal` as a hint: + ignore await CanDBIndex.putAttributeNoDuplicates("user", { sk = totalVotesSK; key = "v"; value = #tuple([#int up2, #int down2]) }); // TODO: race condition + }; + + let parentCanister = actor(Principal.toText(parentPrincipal)) : CanDBPartition.CanDBPartition; + let links = await* getStreamLinks((childPrincipal, child), comment); + let streamsData = await* itemsStream((parentPrincipal, parent), "sv"); + let streamsVar: [var ?Reorder.Order] = switch (streamsData) { + case (?streams) { Array.thaw(streams) }; + case null { [var null, null, null]}; + }; + let order = switch (streamsVar[links]) { + case (?order) { order }; + case null { + await* Reorder.createOrder(GUID.nextGuid(guidGen), NacDBIndex, orderer, ?10000); + }; + }; + if (streamsVar[links] == null) { 
+ streamsVar[links] := ?order; + let data = lib.serializeStreams(Array.freeze(streamsVar)); + await parentCanister.putAttribute({ sk = "i/" # Nat.toText(parent); key = "sv"; value = data }); + }; + + await* Reorder.move(GUID.nextGuid(guidGen), NacDBIndex, orderer, { + order; + value = Nat.toText(child) # "@" # Principal.toText(childPrincipal); + relative = true; + newKey = -difference * 2**16; + }); + }; + + /// Insert item into the beginning of the global list. + public shared({caller}) func insertIntoAllTimeStream(itemId: (Principal, Nat)): async () { + checkCaller(caller); + + let globalTimeStream = await NacDBIndex.getAllItemsStream(); + await* addItemToList(globalTimeStream, itemId, #beginning); // TODO: Implement #beginning special case. + }; + + /// Remove an item from the global time stream. + public shared({caller}) func removeFromAllTimeStream(itemId: (Principal, Nat)): async () { + checkCaller(caller); + + let globalTimeStream = await NacDBIndex.getAllItemsStream(); + let value = Nat.toText(itemId.1) # "@" # Principal.toText(itemId.0); + await* Reorder.delete(GUID.nextGuid(guidGen), NacDBIndex, orderer, { order = globalTimeStream; value }); + }; + + // TODO: Below functions? + + // func deserializeVoteAttr(attr: Entity.AttributeValue): Float { + // switch(attr) { + // case (#float v) { v }; + // case _ { Debug.trap("wrong data"); }; + // } + // }; + + // func deserializeVotes(map: Entity.AttributeMap): Float { + // let v = RBT.get(map, Text.compare, "v"); + // switch (v) { + // case (?v) { deserializeVoteAttr(v) }; + // case _ { Debug.trap("map not found") }; + // }; + // }; + + // TODO: There is a race window that can produce duplicate (two) keys; de-duplicate in the frontend. + // TODO: Use binary keys. + // TODO: Sorting CanDB by `Float` is wrong order.
+ // func setVotes( + // stream: VotesStream, + // oldVotesRandom: Text, + // votesUpdater: ?Float -> Float, + // oldVotesDBCanisterId: Principal, + // parentChildCanisterId: Principal, + // ): async* () { + // if (StableBuffer.size(stream.settingVotes) != 0) { + // return; + // }; + // let tmp = StableBuffer.get(stream.settingVotes, Int.abs((StableBuffer.size(stream.settingVotes): Int) - 1)); + + // // Prevent races: + // if (not tmp.inProcess) { + // if (BTree.has(stream.currentVotes, Nat64.compare, tmp.parent) or BTree.has(stream.currentVotes, Nat64.compare, tmp.child)) { + // Debug.trap("clash"); + // }; + // ignore BTree.insert(stream.currentVotes, Nat64.compare, tmp.parent, ()); + // ignore BTree.insert(stream.currentVotes, Nat64.compare, tmp.child, ()); + // tmp.inProcess := true; + // }; + + // let oldVotesDB: CanDBPartition.CanDBPartition = actor(Principal.toText(oldVotesDBCanisterId)); + // let oldVotesKey = stream.prefix2 # Nat.toText(xNat.from64ToNat(tmp.parent)) # "/" # Nat.toText(xNat.from64ToNat(tmp.child)); + // let oldVotesWeight = switch (await oldVotesDB.get({sk = oldVotesKey})) { + // case (?oldVotesData) { ?deserializeVotes(oldVotesData.attributes) }; + // case (null) { null } + // }; + // let newVotes = switch (oldVotesWeight) { + // case (?oldVotesWeight) { + // let newVotesWeight = votesUpdater(?oldVotesWeight); + // { weight = newVotesWeight; random = oldVotesRandom }; + // }; + // case (null) { + // let newVotesWeight = votesUpdater null; + // { weight = newVotesWeight; random = rng.next() }; + // }; + // }; + + // // TODO: Should use binary format. // TODO: Decimal serialization makes order by `random` broken. 
+ // // newVotes -> child + // let newKey = stream.prefix1 # Nat.toText(xNat.from64ToNat(tmp.parent)) # "/" # Float.toText(newVotes.weight) # "/" # oldVotesRandom; + // await oldVotesDB.put({sk = newKey; attributes = [("v", #text (Nat.toText(Nat64.toNat(tmp.child))))]}); + // // child -> newVotes + // let parentChildCanister: CanDBPartition.CanDBPartition = actor(Principal.toText(parentChildCanisterId)); + // let newKey2 = stream.prefix2 # Nat.toText(xNat.from64ToNat(tmp.parent)) # "/" # Nat.toText(xNat.from64ToNat(tmp.child)); + // // TODO: Use NacDB: + // await parentChildCanister.put({sk = newKey2; attributes = [("v", #float (newVotes.weight))]}); + // switch (oldVotesWeight) { + // case (?oldVotesWeight) { + // let oldKey = stream.prefix1 # Nat.toText(xNat.from64ToNat(tmp.parent)) # "/" # Float.toText(oldVotesWeight) # "/" # oldVotesRandom; + // // delete oldVotes -> child + // await oldVotesDB.delete({sk = oldKey}); + // }; + // case (null) {}; + // }; + + // ignore StableBuffer.removeLast(stream.settingVotes); + // }; + + // stable var userBusyVoting: BTree.BTree = BTree.init(null); // TODO: Delete old ones. + + // TODO: Need to remember the votes // TODO: Remembering in CanDB makes no sense because need to check canister. 
+ // public shared({caller}) func oneVotePerPersonVote(sybilCanister: Principal) { + // await* checkSybil(sybilCanister, caller); + // ignore BTree.insert(userBusyVoting, Principal.compare, caller, ()); + + // // setVotes( + // // stream: VotesStream, + // // oldVotesRandom: Text, + // // votesUpdater: ?Float -> Float, + // // oldVotesDBCanisterId: Principal, + // // parentChildCanisterId) + // // TODO + // }; + + // func setVotes2(parent: Nat64, child: Nat64, prefix1: Text, prefix2: Text) { + + // } +} \ No newline at end of file diff --git a/e2e/assets/wrong_ids/src/backend/payments.mo b/e2e/assets/wrong_ids/src/backend/payments.mo new file mode 100644 index 0000000000..5165509d90 --- /dev/null +++ b/e2e/assets/wrong_ids/src/backend/payments.mo @@ -0,0 +1,344 @@ +import Principal "mo:base/Principal"; +import Nat64 "mo:base/Nat64"; +import Int "mo:base/Int"; +import Debug "mo:base/Debug"; +import Nat "mo:base/Nat"; +import Array "mo:base/Array"; +import Time "mo:base/Time"; + +import Token "mo:icrc1/ICRC1/Canisters/Token"; +import BTree "mo:stableheapbtreemap/BTree"; +import ICRC1Types "mo:icrc1/ICRC1/Types"; +import CanDBPartition "../storage/CanDBPartition"; +import MyCycles "mo:nacdb/Cycles"; +import lib "lib"; +import PST "canister:pst"; +import Fractions "../libs/helpers/fractions.helper"; +import DBConfig "../libs/configs/db.config"; + +shared({caller = initialOwner}) actor class Payments() = this { + /// Owners /// + + stable var initialized: Bool = false; + stable var owners = [initialOwner]; + + func checkCaller(caller: Principal) { + if (Array.find(owners, func(e: Principal): Bool { e == caller; }) == null) { + Debug.trap("order: not allowed"); + }; + }; + + public shared({caller = caller}) func setOwners(_owners: [Principal]): async () { + checkCaller(caller); + + owners := _owners; + }; + + public query func getOwners(): async [Principal] { owners }; + + public shared({ caller }) func init(_owners: [Principal]): async () { + checkCaller(caller); + 
ignore MyCycles.topUpCycles(DBConfig.dbOptions.partitionCycles); // TODO: another number of cycles? + if (initialized) { + Debug.trap("already initialized"); + }; + + owners := _owners; + MyCycles.addPart(DBConfig.dbOptions.partitionCycles); + initialized := true; + }; + + /// Tokens /// + + let nativeIPCToken = "ryjl3-tyaaa-aaaaa-aaaba-cai"; // native NNS ICP token. + // let wrappedICPCanisterId = "o5d6i-5aaaa-aaaah-qbz2q-cai"; // https://github.com/C3-Protocol/wicp_docs + // let wrappedICPCanisterId = "utozz-siaaa-aaaam-qaaxq-cai"; // https://dank.ooo/wicp/ (seem to have less UX) + // Also consider using https://github.com/dfinity/examples/tree/master/motoko/invoice-canister + // or https://github.com/research-ag/motoko-lib/blob/main/src/TokenHandler.mo + + stable var ledger: Token.Token = actor(nativeIPCToken); + + /// Shares /// + + stable var salesOwnersShare = Fractions.fdiv(1, 10); // 10% + stable var upvotesOwnersShare = Fractions.fdiv(1, 2); // 50% + stable var uploadOwnersShare = Fractions.fdiv(3, 20); // 15% // TODO: Delete. 
+ stable var buyerAffiliateShare = Fractions.fdiv(1, 10); // 10% + stable var sellerAffiliateShare = Fractions.fdiv(3, 20); // 15% + + public query func getSalesOwnersShare(): async Fractions.Fraction { salesOwnersShare }; + public query func getUpvotesOwnersShare(): async Fractions.Fraction { upvotesOwnersShare }; + public query func getUploadOwnersShare(): async Fractions.Fraction { uploadOwnersShare }; + public query func getBuyerAffiliateShare(): async Fractions.Fraction { buyerAffiliateShare }; + public query func getSellerAffiliateShare(): async Fractions.Fraction { sellerAffiliateShare }; + + public shared({caller}) func setSalesOwnersShare(_share: Fractions.Fraction) { + checkCaller(caller); + + salesOwnersShare := _share; + }; + + public shared({caller}) func setUpvotesOwnersShare(_share: Fractions.Fraction) { + checkCaller(caller); + + upvotesOwnersShare := _share; + }; + + public shared({caller}) func setUploadOwnersShare(_share: Fractions.Fraction) { + checkCaller(caller); + + uploadOwnersShare := _share; + }; + + public shared({caller}) func setBuyerAffiliateShare(_share: Fractions.Fraction) { + checkCaller(caller); + + buyerAffiliateShare := _share; + }; + + public shared({caller}) func setSellerAffiliateShare(_share: Fractions.Fraction) { + checkCaller(caller); + + sellerAffiliateShare := _share; + }; + + ///////////////// + + type IncomingPayment = { + kind: { #payment; #donation }; + itemId: Nat; + amount: ICRC1Types.Balance; + var time: ?Time.Time; + }; + + // func serializePaymentAttr(payment: IncomingPayment): Entity.AttributeValue { + // var buf = Buffer.Buffer(3); + // buf.add(#int (switch (payment.kind) { + // case (#payment) { 0 }; + // case (#donation) { 1 }; + // })); + // buf.add(#int (payment.itemId)); + // buf.add(#int (payment.amount)); + // #tuple (Buffer.toArray(buf)); + // }; + + // func serializePayment(payment: IncomingPayment): [(Entity.AttributeKey, Entity.AttributeValue)] { + // [("v", serializePaymentAttr(payment))]; + // }; + 
+ // func deserializePaymentAttr(attr: Entity.AttributeValue): IncomingPayment { + // var kind: { #payment; #donation } = #payment; + // var itemId: Int = 0; + // var amount = 0; + // let res = label r: Bool switch (attr) { + // case (#tuple arr) { + // var pos = 0; + // while (pos < arr.size()) { + // switch (pos) { + // case (0) { + // switch (arr[pos]) { + // case (#int v) { + // switch (v) { + // case (0) { kind := #payment; }; + // case (1) { kind := #donation; }; + // case _ { break r false }; + // } + // }; + // case _ { break r false }; + // }; + // }; + // case (1) { + // switch (arr[pos]) { + // case (#int v) { + // itemId := v; + // }; + // case _ { break r false }; + // }; + // }; + // case (2) { + // switch (arr[pos]) { + // case (#int v) { + // amount := Int.abs(v); + // }; + // case _ { break r false }; + // }; + // }; + // case _ { break r false; }; + // }; + // pos += 1; + // }; + // true; + // }; + // case _ { + // false; + // }; + // }; + // if (not res) { + // Debug.trap("wrong user format"); + // }; + // { + // kind = kind; + // itemId = itemId; + // amount = amount; + // }; + // }; + + // func deserializePayment(map: Entity.AttributeMap): IncomingPayment { + // let v = RBT.get(map, Text.compare, "v"); + // switch (v) { + // case (?v) { deserializePaymentAttr(v) }; + // case _ { Debug.trap("map not found") }; + // }; + // }; + + // TODO: clean space by removing smallest payments. + stable var currentPayments: BTree.BTree = BTree.init(null); // TODO: Delete old ones. + + // TODO: clean space by removing smallest debts. 
+ stable var ourDebts: BTree.BTree = BTree.init(null); + + public query func getOurDebt(user: Principal): async Nat { + switch (BTree.get(ourDebts, Principal.compare, user)) { + case (?debt) { debt.amount }; + case (null) { 0 }; + }; + }; + + func indebt(to: Principal, amount: Nat) { + if (amount == 0) { + return; + }; + ignore BTree.update(ourDebts, Principal.compare, to, func (old: ?OutgoingPayment): OutgoingPayment { + let sum = switch (old) { + case (?old) { old.amount + amount }; + case (null) { amount }; + }; + { amount = sum; var time = null }; + }); + }; + + // TODO: On non-existent payment it proceeds successful. Is it OK? + // func processPayment(paymentCanisterId: Principal, userId: Principal, _buyerAffiliate: ?Principal, _sellerAffiliate: ?Principal): async () { + // switch (BTree.get(currentPayments, Principal.compare, userId)) { + // case (?payment) { + // let itemKey = "i/" # Nat.toText(payment.itemId); + // switch (await CanDBPartition.getAttribute({sk = itemKey}, "i")) { + // case (?itemRepr) { + // let item = lib.deserializeItem(itemRepr); + // let time = switch (payment.time) { + // case (?time) { time }; + // case (null) { + // let time = Time.now(); + // payment.time := ?time; + // ignore BTree.insert(currentPayments, Principal.compare, userId, payment); + // time; + // }; + // }; + // let fee = await ledger.icrc1_fee(); + // let result = await ledger.icrc1_transfer({ + // from_subaccount = ?Principal.toBlob(userId); + // to = {owner = Principal.fromActor(this); subaccount = null}; + // amount = payment.amount - fee; + // fee = null; + // memo = null; + // created_at_time = ?Nat64.fromNat(Int.abs(time)); // idempotent + // }); + // switch (result) { + // case (#Ok _ or #Err (#Duplicate _)) {}; + // case _ { Debug.trap("can't pay") }; + // }; + // let _shareholdersShare = Fractions.mul(payment.amount, salesOwnersShare); + // recalculateShareholdersDebt(Int.abs(_shareholdersShare), _buyerAffiliate, _sellerAffiliate); + // let toAuthor = 
payment.amount - _shareholdersShare; + // indebt(item.creator, Int.abs(toAuthor)); + // }; + // case (null) {}; + // }; + // ignore BTree.delete(currentPayments, Principal.compare, userId); + // }; + // case (null) {}; + // }; + // }; + + /// Dividents and Withdrawals /// + + var totalDividends = 0; + var totalDividendsPaid = 0; // actually paid sum + // TODO: Set a heavy transfer fee of the PST to ensure that `lastTotalDivedends` doesn't take much memory. + stable var lastTotalDivedends: BTree.BTree = BTree.init(null); + + func _dividendsOwing(_account: Principal): async Nat { + let lastTotal = switch (BTree.get(lastTotalDivedends, Principal.compare, _account)) { + case (?value) { value }; + case (null) { 0 }; + }; + let _newDividends = Int.abs((totalDividends: Int) - lastTotal); + // rounding down + let balance = await PST.icrc1_balance_of({owner = _account; subaccount = null}); + let total = await PST.icrc1_total_supply(); + balance * _newDividends / total; + }; + + func recalculateShareholdersDebt(_amount: Nat, _buyerAffiliate: ?Principal, _sellerAffiliate: ?Principal) { + // Affiliates are delivered by frontend. 
+ // address payable _buyerAffiliate = affiliates[msg.sender]; + // address payable _sellerAffiliate = affiliates[_author]; + var _shareHoldersAmount = _amount; + switch (_buyerAffiliate) { + case (?_buyerAffiliate) { + let _buyerAffiliateAmount = Int.abs(Fractions.mul(_amount, buyerAffiliateShare)); + indebt(_buyerAffiliate, _buyerAffiliateAmount); + if (_shareHoldersAmount < _buyerAffiliateAmount) { + Debug.trap("negative amount to pay"); + }; + _shareHoldersAmount -= _buyerAffiliateAmount; + }; + case (null) {}; + }; + switch (_sellerAffiliate) { + case (?_sellerAffiliate) { + let _sellerAffiliateAmount = Int.abs(Fractions.mul(_amount, sellerAffiliateShare)); + indebt(_sellerAffiliate, _sellerAffiliateAmount); + if (_shareHoldersAmount < _sellerAffiliateAmount) { + Debug.trap("negative amount to pay"); + }; + _shareHoldersAmount -= _sellerAffiliateAmount; + }; + case (null) {}; + }; + totalDividends += _shareHoldersAmount; + }; + + /// Outgoing Payments /// + + type OutgoingPayment = { + amount: ICRC1Types.Balance; + var time: ?Time.Time; + }; + + public shared({caller}) func payout(subaccount: ?ICRC1Types.Subaccount) { + switch (BTree.get(ourDebts, Principal.compare, caller)) { + case (?payment) { + let time = switch (payment.time) { + case (?time) { time }; + case (null) { + let time = Time.now(); + payment.time := ?time; + time; + } + }; + let fee = await ledger.icrc1_fee(); + let result = await ledger.icrc1_transfer({ + from_subaccount = null; + to = {owner = caller; subaccount = subaccount}; + amount = payment.amount - fee; + fee = null; + memo = null; + created_at_time = ?Nat64.fromNat(Int.abs(time)); // idempotent + }); + ignore BTree.delete(ourDebts, Principal.compare, caller); + }; + case (null) {}; + } + }; +} \ No newline at end of file diff --git a/e2e/assets/wrong_ids/src/backend/personhood.mo b/e2e/assets/wrong_ids/src/backend/personhood.mo new file mode 100644 index 0000000000..eb7150238d --- /dev/null +++ 
b/e2e/assets/wrong_ids/src/backend/personhood.mo @@ -0,0 +1,87 @@ +import Time "mo:base/Time"; +import Debug "mo:base/Debug"; +import Principal "mo:base/Principal"; + +import CanDBIndex "canister:CanDBIndex"; +import ic_eth "canister:ic_eth"; +import Types "mo:passport-client/lib/Types"; +import V "mo:passport-client/lib/Verifier"; +import PassportConfig "../libs/configs/passport.config"; + +actor Personhood { + /// Shared /// + + // TODO: canister hint for ethereumAddress + func controlEthereumAddress(caller: Principal, address: Text): async* () { + let callerText = Principal.toText(caller); + // TODO: race: + let pa = await CanDBIndex.getFirstAttribute("user", { sk = address; key = "p" }); + switch (pa) { + case (?(p, ?#text a)) { + if (a != callerText) { + Debug.trap("attempt to use other's Ethereum address"); + } + }; + case _ { + // TODO: Optimize performance: + ignore await CanDBIndex.putAttributeNoDuplicates( + "user", + { sk = address; key = "p"; value = #text callerText }, + ); + }; + }; + }; + + // TODO: This function is unused + public shared({caller}) func scoreBySignedEthereumAddress({address: Text; signature: Text; nonce: Text}): async Text { + await* controlEthereumAddress(caller, address); + // A real app would store the verified address somewhere instead of just returning the score to frontend. + // Use `extractItemScoreFromBody` or `extractItemScoreFromJSON` to extract score. + let body = await* V.scoreBySignedEthereumAddress({ + ic_eth; + address; + signature; + nonce; + config = PassportConfig.configScorer; + transform = removeHTTPHeaders; + }); + let score = V.extractItemScoreFromBody(body); + await CanDBIndex.setVotingData(caller, null, { // TODO: Provide partition hint. 
+ points = score; + lastChecked = Time.now(); + ethereumAddress = address; + config = PassportConfig.configScorer; + }); + body; + }; + + public shared({caller}) func submitSignedEthereumAddressForScore({address: Text; signature: Text; nonce: Text}): async Text { + await* controlEthereumAddress(caller, address); + // A real app would store the verified address somewhere instead of just returning the score to frontend. + // Use `extractItemScoreFromBody` or `extractItemScoreFromJSON` to extract score. + let body = await* V.submitSignedEthereumAddressForScore({ + ic_eth; + address; + signature; + nonce; + config = PassportConfig.configScorer; + transform = removeHTTPHeaders; + }); + let score = V.extractItemScoreFromBody(body); + await CanDBIndex.setVotingData(caller, null, { // TODO: Provide partition hint, not `null`. + points = score; + lastChecked = Time.now(); + ethereumAddress = address; + config = PassportConfig.configScorer; + }); + body; + }; + + public shared func getEthereumSigningMessage(): async {message: Text; nonce: Text} { + await* V.getEthereumSigningMessage({transform = removeHTTPHeaders; config = PassportConfig.configScorer}); + }; + + public shared query func removeHTTPHeaders(args: Types.TransformArgs): async Types.HttpResponsePayload { + V.removeHTTPHeaders(args); + }; +} \ No newline at end of file diff --git a/e2e/assets/wrong_ids/src/backend/pst.mo b/e2e/assets/wrong_ids/src/backend/pst.mo new file mode 100644 index 0000000000..aec1320be6 --- /dev/null +++ b/e2e/assets/wrong_ids/src/backend/pst.mo @@ -0,0 +1,86 @@ +import Principal "mo:base/Principal"; +import ExperimentalCycles "mo:base/ExperimentalCycles"; +import ICRC1 "mo:icrc1/ICRC1"; +// import ICRC1Types "mo:icrc1/ICRC1/Types"; + +shared ({ caller = initialOwner }) actor class PST() : async ICRC1.FullInterface { + stable var initialized: Bool = false; + + stable let token = ICRC1.init({ + advanced_settings = null; + decimals = 5; + fee = 10_000; + initial_balances = [({owner = 
initialOwner; subaccount = null}, 10_000_000_000)]; + max_supply = 10_000_000_000; + min_burn_amount = 100_000; + minting_account = { owner = Principal.fromText("aaaaa-aa"); subaccount = null; }; // nobody + name = "Zon Directory PST token"; + symbol = "ZDPST"; // FIXME + }); + + /// Functions for the ICRC1 token standard + public shared query func icrc1_name() : async Text { + ICRC1.name(token); + }; + + public shared query func icrc1_symbol() : async Text { + ICRC1.symbol(token); + }; + + public shared query func icrc1_decimals() : async Nat8 { + ICRC1.decimals(token); + }; + + public shared query func icrc1_fee() : async ICRC1.Balance { + ICRC1.fee(token); + }; + + public shared query func icrc1_metadata() : async [ICRC1.MetaDatum] { + ICRC1.metadata(token); + }; + + public shared query func icrc1_total_supply() : async ICRC1.Balance { + ICRC1.total_supply(token); + }; + + public shared query func icrc1_minting_account() : async ?ICRC1.Account { + ?ICRC1.minting_account(token); + }; + + public shared query func icrc1_balance_of(args : ICRC1.Account) : async ICRC1.Balance { + ICRC1.balance_of(token, args); + }; + + public shared query func icrc1_supported_standards() : async [ICRC1.SupportedStandard] { + ICRC1.supported_standards(token); + }; + + public shared ({ caller }) func icrc1_transfer(args : ICRC1.TransferArgs) : async ICRC1.TransferResult { + await* ICRC1.transfer(token, args, caller); + }; + + public shared ({ caller }) func mint(args : ICRC1.Mint) : async ICRC1.TransferResult { + await* ICRC1.mint(token, args, caller); + }; + + public shared ({ caller }) func burn(args : ICRC1.BurnArgs) : async ICRC1.TransferResult { + await* ICRC1.burn(token, args, caller); + }; + + // Functions from the rosetta icrc1 ledger + public shared query func get_transactions(req : ICRC1.GetTransactionsRequest) : async ICRC1.GetTransactionsResponse { + ICRC1.get_transactions(token, req); + }; + + // Additional functions not included in the ICRC1 standard + public shared func 
get_transaction(i : ICRC1.TxIndex) : async ?ICRC1.Transaction { + await* ICRC1.get_transaction(token, i); + }; + + // Deposit cycles into this archive canister. + public shared func deposit_cycles() : async () { + let amount = ExperimentalCycles.available(); + let accepted = ExperimentalCycles.accept(amount); + assert (accepted == amount); + }; +}; diff --git a/e2e/assets/wrong_ids/src/custom.d.ts b/e2e/assets/wrong_ids/src/custom.d.ts new file mode 100644 index 0000000000..2ff147833c --- /dev/null +++ b/e2e/assets/wrong_ids/src/custom.d.ts @@ -0,0 +1,4 @@ +declare module "*.svg" { + const content: React.FunctionComponent>; + export default content; +} diff --git a/e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did new file mode 100644 index 0000000000..de595ac93a --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did @@ -0,0 +1,83 @@ +type VotingScore = + record { + ethereumAddress: text; + lastChecked: Time; + points: float64; + }; +type UpgradePKRangeResult = + record { + nextKey: opt text; + upgradeCanisterResults: vec record { + text; + InterCanisterActionResult; + }; + }; +type Time = int; +type SK = text; +type InterCanisterActionResult = + variant { + err: text; + ok; + }; +type CanDBIndex = + service { + autoScaleCanister: (text) -> (text); + checkSybil: (principal) -> (); + /// @required API (Do not delete or change) + /// + /// Get all canisters for an specific PK + /// + /// This method is called often by the candb-client query & update methods. 
+ getCanistersByPK: (text) -> (vec text) query; + getFirstAttribute: (text, record { + key: AttributeKey; + sk: SK; + }) -> + (opt record { + principal; + opt AttributeValue; + }); + getOwners: () -> (vec principal) query; + init: (vec principal) -> (); + putAttributeNoDuplicates: (text, + record { + key: AttributeKey; + sk: SK; + value: AttributeValue; + }) -> (principal); + putAttributeWithPossibleDuplicate: (text, + record { + key: AttributeKey; + sk: SK; + value: AttributeValue; + }) -> (principal); + setOwners: (vec principal) -> (); + setVotingData: (principal, opt principal, VotingScore) -> (); + sybilScore: () -> (bool, float64); + /// This hook is called by CanDB for AutoScaling the User Service Actor. + /// + /// If the developer does not spin up an additional User canister in the same partition within this method, auto-scaling will NOT work + /// Upgrade user canisters in a PK range, i.e. rolling upgrades (limit is fixed at upgrading the canisters of 5 PKs per call) + upgradeAllPartitionCanisters: (blob) -> (UpgradePKRangeResult); + }; +type AttributeValuePrimitive = + variant { + "bool": bool; + float: float64; + "int": int; + "text": text; + }; +type AttributeValue = + variant { + arrayBool: vec bool; + arrayFloat: vec float64; + arrayInt: vec int; + arrayText: vec text; + "bool": bool; + float: float64; + "int": int; + "text": text; + tuple: vec AttributeValuePrimitive; + }; +type AttributeKey = text; +service : () -> CanDBIndex diff --git a/e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did.d.ts b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did.d.ts new file mode 100644 index 0000000000..0b94ed2d93 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did.d.ts @@ -0,0 +1,63 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export type AttributeKey = string; +export type AttributeValue = { 
'int' : bigint } | + { 'float' : number } | + { 'tuple' : Array } | + { 'bool' : boolean } | + { 'text' : string } | + { 'arrayBool' : Array } | + { 'arrayText' : Array } | + { 'arrayInt' : Array } | + { 'arrayFloat' : Array }; +export type AttributeValuePrimitive = { 'int' : bigint } | + { 'float' : number } | + { 'bool' : boolean } | + { 'text' : string }; +export interface CanDBIndex { + 'autoScaleCanister' : ActorMethod<[string], string>, + 'checkSybil' : ActorMethod<[Principal], undefined>, + 'getCanistersByPK' : ActorMethod<[string], Array>, + 'getFirstAttribute' : ActorMethod< + [string, { 'sk' : SK, 'key' : AttributeKey }], + [] | [[Principal, [] | [AttributeValue]]] + >, + 'getOwners' : ActorMethod<[], Array>, + 'init' : ActorMethod<[Array], undefined>, + 'putAttributeNoDuplicates' : ActorMethod< + [string, { 'sk' : SK, 'key' : AttributeKey, 'value' : AttributeValue }], + Principal + >, + 'putAttributeWithPossibleDuplicate' : ActorMethod< + [string, { 'sk' : SK, 'key' : AttributeKey, 'value' : AttributeValue }], + Principal + >, + 'setOwners' : ActorMethod<[Array], undefined>, + 'setVotingData' : ActorMethod< + [Principal, [] | [Principal], VotingScore], + undefined + >, + 'sybilScore' : ActorMethod<[], [boolean, number]>, + 'upgradeAllPartitionCanisters' : ActorMethod< + [Uint8Array | number[]], + UpgradePKRangeResult + >, +} +export type InterCanisterActionResult = { 'ok' : null } | + { 'err' : string }; +export type SK = string; +export type Time = bigint; +export interface UpgradePKRangeResult { + 'nextKey' : [] | [string], + 'upgradeCanisterResults' : Array<[string, InterCanisterActionResult]>, +} +export interface VotingScore { + 'ethereumAddress' : string, + 'lastChecked' : Time, + 'points' : number, +} +export interface _SERVICE extends CanDBIndex {} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git 
a/e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did.js b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did.js new file mode 100644 index 0000000000..8e44f9c1d7 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/CanDBIndex.did.js @@ -0,0 +1,87 @@ +export const idlFactory = ({ IDL }) => { + const SK = IDL.Text; + const AttributeKey = IDL.Text; + const AttributeValuePrimitive = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'bool' : IDL.Bool, + 'text' : IDL.Text, + }); + const AttributeValue = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'tuple' : IDL.Vec(AttributeValuePrimitive), + 'bool' : IDL.Bool, + 'text' : IDL.Text, + 'arrayBool' : IDL.Vec(IDL.Bool), + 'arrayText' : IDL.Vec(IDL.Text), + 'arrayInt' : IDL.Vec(IDL.Int), + 'arrayFloat' : IDL.Vec(IDL.Float64), + }); + const Time = IDL.Int; + const VotingScore = IDL.Record({ + 'ethereumAddress' : IDL.Text, + 'lastChecked' : Time, + 'points' : IDL.Float64, + }); + const InterCanisterActionResult = IDL.Variant({ + 'ok' : IDL.Null, + 'err' : IDL.Text, + }); + const UpgradePKRangeResult = IDL.Record({ + 'nextKey' : IDL.Opt(IDL.Text), + 'upgradeCanisterResults' : IDL.Vec( + IDL.Tuple(IDL.Text, InterCanisterActionResult) + ), + }); + const CanDBIndex = IDL.Service({ + 'autoScaleCanister' : IDL.Func([IDL.Text], [IDL.Text], []), + 'checkSybil' : IDL.Func([IDL.Principal], [], []), + 'getCanistersByPK' : IDL.Func([IDL.Text], [IDL.Vec(IDL.Text)], ['query']), + 'getFirstAttribute' : IDL.Func( + [IDL.Text, IDL.Record({ 'sk' : SK, 'key' : AttributeKey })], + [IDL.Opt(IDL.Tuple(IDL.Principal, IDL.Opt(AttributeValue)))], + [], + ), + 'getOwners' : IDL.Func([], [IDL.Vec(IDL.Principal)], ['query']), + 'init' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'putAttributeNoDuplicates' : IDL.Func( + [ + IDL.Text, + IDL.Record({ + 'sk' : SK, + 'key' : AttributeKey, + 'value' : AttributeValue, + }), + ], + [IDL.Principal], + [], + ), + 
'putAttributeWithPossibleDuplicate' : IDL.Func( + [ + IDL.Text, + IDL.Record({ + 'sk' : SK, + 'key' : AttributeKey, + 'value' : AttributeValue, + }), + ], + [IDL.Principal], + [], + ), + 'setOwners' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'setVotingData' : IDL.Func( + [IDL.Principal, IDL.Opt(IDL.Principal), VotingScore], + [], + [], + ), + 'sybilScore' : IDL.Func([], [IDL.Bool, IDL.Float64], []), + 'upgradeAllPartitionCanisters' : IDL.Func( + [IDL.Vec(IDL.Nat8)], + [UpgradePKRangeResult], + [], + ), + }); + return CanDBIndex; +}; +export const init = ({ IDL }) => { return []; }; diff --git a/e2e/assets/wrong_ids/src/declarations/CanDBIndex/index.d.ts b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/index.d.ts new file mode 100644 index 0000000000..92b9932fb4 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './CanDBIndex.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. + * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. 
Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const CanDBIndex: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/CanDBIndex/index.js b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/index.js new file mode 100644 index 0000000000..448f2f74f8 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBIndex/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./CanDBIndex.did.js"; +export { idlFactory } from "./CanDBIndex.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_CANDBINDEX; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." + ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. 
Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const CanDBIndex = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did new file mode 100644 index 0000000000..c72e2e913a --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did @@ -0,0 +1,341 @@ +type Tree = + variant { + leaf; + node: record { + Color; + Tree; + record { + AttributeKey; + opt AttributeValue; + }; + Tree; + }; + }; +type SubDBSizeOuterOptions = record {outer: OuterPair;}; +type Streams = vec opt Order; +type ScanResult = + record { + entities: vec Entity; + nextKey: opt SK; + }; +type ScanOptions = + record { + ascending: opt bool; + limit: nat; + skLowerBound: SK; + skUpperBound: SK; + }; +type ScanLimitResult = + record { + nextKey: opt text; + results: vec record { + text; + AttributeValue__1; + }; + }; +type ScalingOptions = + record { + autoScalingHook: AutoScalingCanisterSharedFunctionHook; + sizeLimit: ScalingLimitType; + }; +type ScalingLimitType = + variant { + count: nat; + heapSize: nat; + }; +type SK__1 = text; +type SK = text; +type PutOptions = + record { + attributes: vec record { + AttributeKey; + AttributeValue; + }; + sk: SK; + }; +type PK = text; +type OuterSubDBKey = nat; +type OuterPair = + record { + canister: OuterCanister; + key: OuterSubDBKey; + }; +type OuterCanister = + service { + createOuter: + (record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + part: principal; + }) -> + (record { + inner: record { + canister: principal; + key: InnerSubDBKey; + }; + outer: record { + canister: principal; + key: OuterSubDBKey; + }; + }); + deleteInner: (record { + 
innerKey: InnerSubDBKey; + sk: SK__1; + }) -> (); + deleteSubDBInner: (record {innerKey: InnerSubDBKey;}) -> (); + deleteSubDBOuter: (record {outerKey: OuterSubDBKey;}) -> (); + getByInner: (record { + innerKey: InnerSubDBKey; + sk: SK__1; + }) -> (opt AttributeValue__1) query; + getByOuter: (record { + outerKey: OuterSubDBKey; + sk: SK__1; + }) -> (opt AttributeValue__1); + getInner: (record {outerKey: OuterSubDBKey;}) -> + (opt record { + canister: principal; + key: InnerSubDBKey; + }) query; + getOuter: (GetByOuterPartitionKeyOptions) -> (opt AttributeValue__1); + getSubDBUserDataInner: (record {innerKey: InnerSubDBKey;}) -> (opt text); + getSubDBUserDataOuter: (GetUserDataOuterOptions) -> (opt text); + hasByInner: (record { + innerKey: InnerSubDBKey; + sk: SK__1; + }) -> (bool) query; + hasByOuter: (record { + outerKey: OuterSubDBKey; + sk: SK__1; + }) -> (bool); + hasSubDBByInner: (record {innerKey: InnerSubDBKey;}) -> (bool) query; + hasSubDBByOuter: (record {outerKey: OuterSubDBKey;}) -> (bool); + isOverflowed: () -> (bool) query; + putLocation: + (record { + innerCanister: principal; + newInnerSubDBKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }) -> (); + rawDeleteSubDB: (record {innerKey: InnerSubDBKey;}) -> (); + rawGetSubDB: (record {innerKey: InnerSubDBKey;}) -> + (opt record { + map: vec record { + SK__1; + AttributeValue__1; + }; + userData: text; + }) query; + rawInsertSubDB: + (record { + hardCap: opt nat; + innerKey: opt InnerSubDBKey; + map: vec record { + SK__1; + AttributeValue__1; + }; + userData: text; + }) -> (record {innerKey: InnerSubDBKey;}); + rawInsertSubDBAndSetOuter: + (record { + hardCap: opt nat; + keys: opt record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }; + map: vec record { + SK__1; + AttributeValue__1; + }; + userData: text; + }) -> (record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }); + scanLimitInner: + (record { + dir: Direction; + innerKey: InnerSubDBKey; + limit: nat; + lowerBound: SK__1; 
+ upperBound: SK__1; + }) -> (ScanLimitResult) query; + scanLimitOuter: + (record { + dir: Direction; + limit: nat; + lowerBound: SK__1; + outerKey: OuterSubDBKey; + upperBound: SK__1; + }) -> (ScanLimitResult); + scanSubDBs: () -> + (vec + record { + OuterSubDBKey; + record { + canister: principal; + key: InnerSubDBKey; + }; + }) query; + startInsertingImpl: + (record { + innerKey: InnerSubDBKey; + sk: SK__1; + value: AttributeValue__1; + }) -> (); + subDBSizeByInner: (record {innerKey: InnerSubDBKey;}) -> (opt nat) query; + subDBSizeByOuter: (record {outerKey: OuterSubDBKey;}) -> (opt nat); + subDBSizeOuterImpl: (SubDBSizeOuterOptions) -> (opt nat); + superDBSize: () -> (nat) query; + }; +type Order = + record { + order: record { + OuterCanister; + OuterSubDBKey; + }; + reverse: record { + OuterCanister; + OuterSubDBKey; + }; + }; +type ItemTransfer = + record { + communal: bool; + data: ItemData; + }; +type ItemDetails = + variant { + folder; + link: text; + message; + post; + }; +type ItemDataWithoutOwner = + record { + description: text; + details: ItemDetails; + locale: text; + price: float64; + title: text; + }; +type ItemData = + record { + creator: principal; + edited: bool; + item: ItemDataWithoutOwner; + }; +type InnerSubDBKey = nat; +type GetUserDataOuterOptions = record {outer: OuterPair;}; +type GetOptions = record {sk: SK;}; +type GetByOuterPartitionKeyOptions = + record { + outer: OuterPair; + sk: SK__1; + }; +type Entity = + record { + attributes: AttributeMap; + pk: PK; + sk: SK; + }; +type Direction = + variant { + bwd; + fwd; + }; +type DeleteOptions = record {sk: SK;}; +type Color = + variant { + B; + R; + }; +type CanDBPartition = + service { + delete: (DeleteOptions) -> (); + get: (GetOptions) -> (opt Entity) query; + getAttribute: (GetOptions, text) -> (opt AttributeValue) query; + getItem: (nat) -> (opt ItemTransfer); + getOwners: () -> (vec principal) query; + /// @recommended (not required) public API + getPK: () -> (text) query; + 
getStreams: (nat, text) -> (opt Streams) query; + put: (PutOptions) -> (); + putAttribute: + (record { + key: AttributeKey; + sk: SK; + value: AttributeValue; + }) -> (); + putExisting: (PutOptions) -> (bool); + putExistingAttribute: + (record { + key: AttributeKey; + sk: SK; + value: AttributeValue; + }) -> (bool); + scan: (ScanOptions) -> (ScanResult) query; + setOwners: (vec principal) -> (); + /// @required public API (Do not delete or change) + skExists: (text) -> (bool) query; + /// @required public API (Do not delete or change) + transferCycles: () -> (); + }; +type AutoScalingCanisterSharedFunctionHook = func (text) -> (text); +type AttributeValue__1 = + variant { + arrayBool: vec bool; + arrayFloat: vec float64; + arrayInt: vec int; + arrayText: vec text; + "bool": bool; + float: float64; + "int": int; + "text": text; + tuple: vec AttributeValuePrimitive__1; + }; +type AttributeValuePrimitive__1 = + variant { + "bool": bool; + float: float64; + "int": int; + "text": text; + }; +type AttributeValuePrimitive = + variant { + "bool": bool; + float: float64; + "int": int; + "text": text; + }; +type AttributeValue = + variant { + arrayBool: vec bool; + arrayFloat: vec float64; + arrayInt: vec int; + arrayText: vec text; + "bool": bool; + float: float64; + "int": int; + "text": text; + tuple: vec AttributeValuePrimitive; + }; +type AttributeMap = + variant { + leaf; + node: record { + Color; + Tree; + record { + AttributeKey; + opt AttributeValue; + }; + Tree; + }; + }; +type AttributeKey = text; +service : (record { + owners: opt vec principal; + partitionKey: text; + scalingOptions: ScalingOptions; + }) -> CanDBPartition diff --git a/e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did.d.ts b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did.d.ts new file mode 100644 index 0000000000..ccd3ea4388 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did.d.ts @@ -0,0 +1,267 @@ +import type 
{ Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export type AttributeKey = string; +export type AttributeMap = { 'leaf' : null } | + { 'node' : [Color, Tree, [AttributeKey, [] | [AttributeValue]], Tree] }; +export type AttributeValue = { 'int' : bigint } | + { 'float' : number } | + { 'tuple' : Array } | + { 'bool' : boolean } | + { 'text' : string } | + { 'arrayBool' : Array } | + { 'arrayText' : Array } | + { 'arrayInt' : Array } | + { 'arrayFloat' : Array }; +export type AttributeValuePrimitive = { 'int' : bigint } | + { 'float' : number } | + { 'bool' : boolean } | + { 'text' : string }; +export type AttributeValuePrimitive__1 = { 'int' : bigint } | + { 'float' : number } | + { 'bool' : boolean } | + { 'text' : string }; +export type AttributeValue__1 = { 'int' : bigint } | + { 'float' : number } | + { 'tuple' : Array } | + { 'bool' : boolean } | + { 'text' : string } | + { 'arrayBool' : Array } | + { 'arrayText' : Array } | + { 'arrayInt' : Array } | + { 'arrayFloat' : Array }; +export type AutoScalingCanisterSharedFunctionHook = ActorMethod< + [string], + string +>; +export interface CanDBPartition { + 'delete' : ActorMethod<[DeleteOptions], undefined>, + 'get' : ActorMethod<[GetOptions], [] | [Entity]>, + 'getAttribute' : ActorMethod<[GetOptions, string], [] | [AttributeValue]>, + 'getItem' : ActorMethod<[bigint], [] | [ItemTransfer]>, + 'getOwners' : ActorMethod<[], Array>, + 'getPK' : ActorMethod<[], string>, + 'getStreams' : ActorMethod<[bigint, string], [] | [Streams]>, + 'put' : ActorMethod<[PutOptions], undefined>, + 'putAttribute' : ActorMethod< + [{ 'sk' : SK, 'key' : AttributeKey, 'value' : AttributeValue }], + undefined + >, + 'putExisting' : ActorMethod<[PutOptions], boolean>, + 'putExistingAttribute' : ActorMethod< + [{ 'sk' : SK, 'key' : AttributeKey, 'value' : AttributeValue }], + boolean + >, + 'scan' : ActorMethod<[ScanOptions], ScanResult>, + 
'setOwners' : ActorMethod<[Array], undefined>, + 'skExists' : ActorMethod<[string], boolean>, + 'transferCycles' : ActorMethod<[], undefined>, +} +export type Color = { 'B' : null } | + { 'R' : null }; +export interface DeleteOptions { 'sk' : SK } +export type Direction = { 'bwd' : null } | + { 'fwd' : null }; +export interface Entity { 'pk' : PK, 'sk' : SK, 'attributes' : AttributeMap } +export interface GetByOuterPartitionKeyOptions { + 'sk' : SK__1, + 'outer' : OuterPair, +} +export interface GetOptions { 'sk' : SK } +export interface GetUserDataOuterOptions { 'outer' : OuterPair } +export type InnerSubDBKey = bigint; +export interface ItemData { + 'creator' : Principal, + 'edited' : boolean, + 'item' : ItemDataWithoutOwner, +} +export interface ItemDataWithoutOwner { + 'title' : string, + 'locale' : string, + 'description' : string, + 'details' : ItemDetails, + 'price' : number, +} +export type ItemDetails = { 'link' : string } | + { 'post' : null } | + { 'message' : null } | + { 'folder' : null }; +export interface ItemTransfer { 'data' : ItemData, 'communal' : boolean } +export interface Order { + 'reverse' : [OuterCanister, OuterSubDBKey], + 'order' : [OuterCanister, OuterSubDBKey], +} +export interface OuterCanister { + 'createOuter' : ActorMethod< + [ + { + 'part' : Principal, + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }, + ], + { + 'outer' : { 'key' : OuterSubDBKey, 'canister' : Principal }, + 'inner' : { 'key' : InnerSubDBKey, 'canister' : Principal }, + } + >, + 'deleteInner' : ActorMethod< + [{ 'sk' : SK__1, 'innerKey' : InnerSubDBKey }], + undefined + >, + 'deleteSubDBInner' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], undefined>, + 'deleteSubDBOuter' : ActorMethod<[{ 'outerKey' : OuterSubDBKey }], undefined>, + 'getByInner' : ActorMethod< + [{ 'sk' : SK__1, 'innerKey' : InnerSubDBKey }], + [] | [AttributeValue__1] + >, + 'getByOuter' : ActorMethod< + [{ 'sk' : SK__1, 'outerKey' : OuterSubDBKey }], + [] | [AttributeValue__1] + >, 
+ 'getInner' : ActorMethod< + [{ 'outerKey' : OuterSubDBKey }], + [] | [{ 'key' : InnerSubDBKey, 'canister' : Principal }] + >, + 'getOuter' : ActorMethod< + [GetByOuterPartitionKeyOptions], + [] | [AttributeValue__1] + >, + 'getSubDBUserDataInner' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [string] + >, + 'getSubDBUserDataOuter' : ActorMethod< + [GetUserDataOuterOptions], + [] | [string] + >, + 'hasByInner' : ActorMethod< + [{ 'sk' : SK__1, 'innerKey' : InnerSubDBKey }], + boolean + >, + 'hasByOuter' : ActorMethod< + [{ 'sk' : SK__1, 'outerKey' : OuterSubDBKey }], + boolean + >, + 'hasSubDBByInner' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], boolean>, + 'hasSubDBByOuter' : ActorMethod<[{ 'outerKey' : OuterSubDBKey }], boolean>, + 'isOverflowed' : ActorMethod<[], boolean>, + 'putLocation' : ActorMethod< + [ + { + 'newInnerSubDBKey' : InnerSubDBKey, + 'innerCanister' : Principal, + 'outerKey' : OuterSubDBKey, + }, + ], + undefined + >, + 'rawDeleteSubDB' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], undefined>, + 'rawGetSubDB' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [{ 'map' : Array<[SK__1, AttributeValue__1]>, 'userData' : string }] + >, + 'rawInsertSubDB' : ActorMethod< + [ + { + 'map' : Array<[SK__1, AttributeValue__1]>, + 'userData' : string, + 'hardCap' : [] | [bigint], + 'innerKey' : [] | [InnerSubDBKey], + }, + ], + { 'innerKey' : InnerSubDBKey } + >, + 'rawInsertSubDBAndSetOuter' : ActorMethod< + [ + { + 'map' : Array<[SK__1, AttributeValue__1]>, + 'userData' : string, + 'keys' : [] | [ + { 'outerKey' : OuterSubDBKey, 'innerKey' : InnerSubDBKey } + ], + 'hardCap' : [] | [bigint], + }, + ], + { 'outerKey' : OuterSubDBKey, 'innerKey' : InnerSubDBKey } + >, + 'scanLimitInner' : ActorMethod< + [ + { + 'dir' : Direction, + 'lowerBound' : SK__1, + 'limit' : bigint, + 'upperBound' : SK__1, + 'innerKey' : InnerSubDBKey, + }, + ], + ScanLimitResult + >, + 'scanLimitOuter' : ActorMethod< + [ + { + 'dir' : Direction, + 
'lowerBound' : SK__1, + 'limit' : bigint, + 'upperBound' : SK__1, + 'outerKey' : OuterSubDBKey, + }, + ], + ScanLimitResult + >, + 'scanSubDBs' : ActorMethod< + [], + Array<[OuterSubDBKey, { 'key' : InnerSubDBKey, 'canister' : Principal }]> + >, + 'startInsertingImpl' : ActorMethod< + [{ 'sk' : SK__1, 'value' : AttributeValue__1, 'innerKey' : InnerSubDBKey }], + undefined + >, + 'subDBSizeByInner' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [bigint] + >, + 'subDBSizeByOuter' : ActorMethod< + [{ 'outerKey' : OuterSubDBKey }], + [] | [bigint] + >, + 'subDBSizeOuterImpl' : ActorMethod<[SubDBSizeOuterOptions], [] | [bigint]>, + 'superDBSize' : ActorMethod<[], bigint>, +} +export interface OuterPair { 'key' : OuterSubDBKey, 'canister' : OuterCanister } +export type OuterSubDBKey = bigint; +export type PK = string; +export interface PutOptions { + 'sk' : SK, + 'attributes' : Array<[AttributeKey, AttributeValue]>, +} +export type SK = string; +export type SK__1 = string; +export type ScalingLimitType = { 'heapSize' : bigint } | + { 'count' : bigint }; +export interface ScalingOptions { + 'autoScalingHook' : AutoScalingCanisterSharedFunctionHook, + 'sizeLimit' : ScalingLimitType, +} +export interface ScanLimitResult { + 'results' : Array<[string, AttributeValue__1]>, + 'nextKey' : [] | [string], +} +export interface ScanOptions { + 'limit' : bigint, + 'ascending' : [] | [boolean], + 'skLowerBound' : SK, + 'skUpperBound' : SK, +} +export interface ScanResult { + 'entities' : Array, + 'nextKey' : [] | [SK], +} +export type Streams = Array<[] | [Order]>; +export interface SubDBSizeOuterOptions { 'outer' : OuterPair } +export type Tree = { 'leaf' : null } | + { 'node' : [Color, Tree, [AttributeKey, [] | [AttributeValue]], Tree] }; +export interface _SERVICE extends CanDBPartition {} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git 
a/e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did.js b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did.js new file mode 100644 index 0000000000..5cff5a226b --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/CanDBPartition.did.js @@ -0,0 +1,428 @@ +export const idlFactory = ({ IDL }) => { + const OuterCanister = IDL.Rec(); + const Tree = IDL.Rec(); + const AutoScalingCanisterSharedFunctionHook = IDL.Func( + [IDL.Text], + [IDL.Text], + [], + ); + const ScalingLimitType = IDL.Variant({ + 'heapSize' : IDL.Nat, + 'count' : IDL.Nat, + }); + const ScalingOptions = IDL.Record({ + 'autoScalingHook' : AutoScalingCanisterSharedFunctionHook, + 'sizeLimit' : ScalingLimitType, + }); + const SK = IDL.Text; + const DeleteOptions = IDL.Record({ 'sk' : SK }); + const GetOptions = IDL.Record({ 'sk' : SK }); + const PK = IDL.Text; + const Color = IDL.Variant({ 'B' : IDL.Null, 'R' : IDL.Null }); + const AttributeKey = IDL.Text; + const AttributeValuePrimitive = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'bool' : IDL.Bool, + 'text' : IDL.Text, + }); + const AttributeValue = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'tuple' : IDL.Vec(AttributeValuePrimitive), + 'bool' : IDL.Bool, + 'text' : IDL.Text, + 'arrayBool' : IDL.Vec(IDL.Bool), + 'arrayText' : IDL.Vec(IDL.Text), + 'arrayInt' : IDL.Vec(IDL.Int), + 'arrayFloat' : IDL.Vec(IDL.Float64), + }); + Tree.fill( + IDL.Variant({ + 'leaf' : IDL.Null, + 'node' : IDL.Tuple( + Color, + Tree, + IDL.Tuple(AttributeKey, IDL.Opt(AttributeValue)), + Tree, + ), + }) + ); + const AttributeMap = IDL.Variant({ + 'leaf' : IDL.Null, + 'node' : IDL.Tuple( + Color, + Tree, + IDL.Tuple(AttributeKey, IDL.Opt(AttributeValue)), + Tree, + ), + }); + const Entity = IDL.Record({ + 'pk' : PK, + 'sk' : SK, + 'attributes' : AttributeMap, + }); + const ItemDetails = IDL.Variant({ + 'link' : IDL.Text, + 'post' : IDL.Null, + 'message' : IDL.Null, + 'folder' : IDL.Null, 
+ }); + const ItemDataWithoutOwner = IDL.Record({ + 'title' : IDL.Text, + 'locale' : IDL.Text, + 'description' : IDL.Text, + 'details' : ItemDetails, + 'price' : IDL.Float64, + }); + const ItemData = IDL.Record({ + 'creator' : IDL.Principal, + 'edited' : IDL.Bool, + 'item' : ItemDataWithoutOwner, + }); + const ItemTransfer = IDL.Record({ 'data' : ItemData, 'communal' : IDL.Bool }); + const OuterSubDBKey = IDL.Nat; + const InnerSubDBKey = IDL.Nat; + const SK__1 = IDL.Text; + const AttributeValuePrimitive__1 = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'bool' : IDL.Bool, + 'text' : IDL.Text, + }); + const AttributeValue__1 = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'tuple' : IDL.Vec(AttributeValuePrimitive__1), + 'bool' : IDL.Bool, + 'text' : IDL.Text, + 'arrayBool' : IDL.Vec(IDL.Bool), + 'arrayText' : IDL.Vec(IDL.Text), + 'arrayInt' : IDL.Vec(IDL.Int), + 'arrayFloat' : IDL.Vec(IDL.Float64), + }); + const OuterPair = IDL.Record({ + 'key' : OuterSubDBKey, + 'canister' : OuterCanister, + }); + const GetByOuterPartitionKeyOptions = IDL.Record({ + 'sk' : SK__1, + 'outer' : OuterPair, + }); + const GetUserDataOuterOptions = IDL.Record({ 'outer' : OuterPair }); + const Direction = IDL.Variant({ 'bwd' : IDL.Null, 'fwd' : IDL.Null }); + const ScanLimitResult = IDL.Record({ + 'results' : IDL.Vec(IDL.Tuple(IDL.Text, AttributeValue__1)), + 'nextKey' : IDL.Opt(IDL.Text), + }); + const SubDBSizeOuterOptions = IDL.Record({ 'outer' : OuterPair }); + OuterCanister.fill( + IDL.Service({ + 'createOuter' : IDL.Func( + [ + IDL.Record({ + 'part' : IDL.Principal, + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }), + ], + [ + IDL.Record({ + 'outer' : IDL.Record({ + 'key' : OuterSubDBKey, + 'canister' : IDL.Principal, + }), + 'inner' : IDL.Record({ + 'key' : InnerSubDBKey, + 'canister' : IDL.Principal, + }), + }), + ], + [], + ), + 'deleteInner' : IDL.Func( + [IDL.Record({ 'sk' : SK__1, 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 
'deleteSubDBInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'deleteSubDBOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [], + [], + ), + 'getByInner' : IDL.Func( + [IDL.Record({ 'sk' : SK__1, 'innerKey' : InnerSubDBKey })], + [IDL.Opt(AttributeValue__1)], + ['query'], + ), + 'getByOuter' : IDL.Func( + [IDL.Record({ 'sk' : SK__1, 'outerKey' : OuterSubDBKey })], + [IDL.Opt(AttributeValue__1)], + [], + ), + 'getInner' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [ + IDL.Opt( + IDL.Record({ 'key' : InnerSubDBKey, 'canister' : IDL.Principal }) + ), + ], + ['query'], + ), + 'getOuter' : IDL.Func( + [GetByOuterPartitionKeyOptions], + [IDL.Opt(AttributeValue__1)], + [], + ), + 'getSubDBUserDataInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Opt(IDL.Text)], + [], + ), + 'getSubDBUserDataOuter' : IDL.Func( + [GetUserDataOuterOptions], + [IDL.Opt(IDL.Text)], + [], + ), + 'hasByInner' : IDL.Func( + [IDL.Record({ 'sk' : SK__1, 'innerKey' : InnerSubDBKey })], + [IDL.Bool], + ['query'], + ), + 'hasByOuter' : IDL.Func( + [IDL.Record({ 'sk' : SK__1, 'outerKey' : OuterSubDBKey })], + [IDL.Bool], + [], + ), + 'hasSubDBByInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Bool], + ['query'], + ), + 'hasSubDBByOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [IDL.Bool], + [], + ), + 'isOverflowed' : IDL.Func([], [IDL.Bool], ['query']), + 'putLocation' : IDL.Func( + [ + IDL.Record({ + 'newInnerSubDBKey' : InnerSubDBKey, + 'innerCanister' : IDL.Principal, + 'outerKey' : OuterSubDBKey, + }), + ], + [], + [], + ), + 'rawDeleteSubDB' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'rawGetSubDB' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [ + IDL.Opt( + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK__1, AttributeValue__1)), + 'userData' : IDL.Text, + }) + ), + ], + ['query'], + ), + 'rawInsertSubDB' : 
IDL.Func( + [ + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK__1, AttributeValue__1)), + 'userData' : IDL.Text, + 'hardCap' : IDL.Opt(IDL.Nat), + 'innerKey' : IDL.Opt(InnerSubDBKey), + }), + ], + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + ), + 'rawInsertSubDBAndSetOuter' : IDL.Func( + [ + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK__1, AttributeValue__1)), + 'userData' : IDL.Text, + 'keys' : IDL.Opt( + IDL.Record({ + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }) + ), + 'hardCap' : IDL.Opt(IDL.Nat), + }), + ], + [ + IDL.Record({ + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }), + ], + [], + ), + 'scanLimitInner' : IDL.Func( + [ + IDL.Record({ + 'dir' : Direction, + 'lowerBound' : SK__1, + 'limit' : IDL.Nat, + 'upperBound' : SK__1, + 'innerKey' : InnerSubDBKey, + }), + ], + [ScanLimitResult], + ['query'], + ), + 'scanLimitOuter' : IDL.Func( + [ + IDL.Record({ + 'dir' : Direction, + 'lowerBound' : SK__1, + 'limit' : IDL.Nat, + 'upperBound' : SK__1, + 'outerKey' : OuterSubDBKey, + }), + ], + [ScanLimitResult], + [], + ), + 'scanSubDBs' : IDL.Func( + [], + [ + IDL.Vec( + IDL.Tuple( + OuterSubDBKey, + IDL.Record({ + 'key' : InnerSubDBKey, + 'canister' : IDL.Principal, + }), + ) + ), + ], + ['query'], + ), + 'startInsertingImpl' : IDL.Func( + [ + IDL.Record({ + 'sk' : SK__1, + 'value' : AttributeValue__1, + 'innerKey' : InnerSubDBKey, + }), + ], + [], + [], + ), + 'subDBSizeByInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Opt(IDL.Nat)], + ['query'], + ), + 'subDBSizeByOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [IDL.Opt(IDL.Nat)], + [], + ), + 'subDBSizeOuterImpl' : IDL.Func( + [SubDBSizeOuterOptions], + [IDL.Opt(IDL.Nat)], + [], + ), + 'superDBSize' : IDL.Func([], [IDL.Nat], ['query']), + }) + ); + const Order = IDL.Record({ + 'reverse' : IDL.Tuple(OuterCanister, OuterSubDBKey), + 'order' : IDL.Tuple(OuterCanister, OuterSubDBKey), + }); + const Streams = IDL.Vec(IDL.Opt(Order)); 
+ const PutOptions = IDL.Record({ + 'sk' : SK, + 'attributes' : IDL.Vec(IDL.Tuple(AttributeKey, AttributeValue)), + }); + const ScanOptions = IDL.Record({ + 'limit' : IDL.Nat, + 'ascending' : IDL.Opt(IDL.Bool), + 'skLowerBound' : SK, + 'skUpperBound' : SK, + }); + const ScanResult = IDL.Record({ + 'entities' : IDL.Vec(Entity), + 'nextKey' : IDL.Opt(SK), + }); + const CanDBPartition = IDL.Service({ + 'delete' : IDL.Func([DeleteOptions], [], []), + 'get' : IDL.Func([GetOptions], [IDL.Opt(Entity)], ['query']), + 'getAttribute' : IDL.Func( + [GetOptions, IDL.Text], + [IDL.Opt(AttributeValue)], + ['query'], + ), + 'getItem' : IDL.Func([IDL.Nat], [IDL.Opt(ItemTransfer)], []), + 'getOwners' : IDL.Func([], [IDL.Vec(IDL.Principal)], ['query']), + 'getPK' : IDL.Func([], [IDL.Text], ['query']), + 'getStreams' : IDL.Func([IDL.Nat, IDL.Text], [IDL.Opt(Streams)], ['query']), + 'put' : IDL.Func([PutOptions], [], []), + 'putAttribute' : IDL.Func( + [ + IDL.Record({ + 'sk' : SK, + 'key' : AttributeKey, + 'value' : AttributeValue, + }), + ], + [], + [], + ), + 'putExisting' : IDL.Func([PutOptions], [IDL.Bool], []), + 'putExistingAttribute' : IDL.Func( + [ + IDL.Record({ + 'sk' : SK, + 'key' : AttributeKey, + 'value' : AttributeValue, + }), + ], + [IDL.Bool], + [], + ), + 'scan' : IDL.Func([ScanOptions], [ScanResult], ['query']), + 'setOwners' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'skExists' : IDL.Func([IDL.Text], [IDL.Bool], ['query']), + 'transferCycles' : IDL.Func([], [], []), + }); + return CanDBPartition; +}; +export const init = ({ IDL }) => { + const AutoScalingCanisterSharedFunctionHook = IDL.Func( + [IDL.Text], + [IDL.Text], + [], + ); + const ScalingLimitType = IDL.Variant({ + 'heapSize' : IDL.Nat, + 'count' : IDL.Nat, + }); + const ScalingOptions = IDL.Record({ + 'autoScalingHook' : AutoScalingCanisterSharedFunctionHook, + 'sizeLimit' : ScalingLimitType, + }); + return [ + IDL.Record({ + 'owners' : IDL.Opt(IDL.Vec(IDL.Principal)), + 'partitionKey' : IDL.Text, + 
'scalingOptions' : ScalingOptions, + }), + ]; +}; diff --git a/e2e/assets/wrong_ids/src/declarations/CanDBPartition/index.d.ts b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/index.d.ts new file mode 100644 index 0000000000..ddb87bfca0 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './CanDBPartition.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. + * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. 
Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const CanDBPartition: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/CanDBPartition/index.js b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/index.js new file mode 100644 index 0000000000..004147f1af --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/CanDBPartition/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./CanDBPartition.did.js"; +export { idlFactory } from "./CanDBPartition.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_CANDBPARTITION; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." + ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. 
Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const CanDBPartition = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did new file mode 100644 index 0000000000..8486d7e147 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did @@ -0,0 +1,246 @@ +type SubDBSizeOuterOptions = record {outer: OuterPair;}; +type ScanLimitResult = + record { + nextKey: opt text; + results: vec record { + text; + AttributeValue; + }; + }; +type SK = text; +type Result = + variant { + err: text; + ok: + record { + inner: record { + canister: principal; + key: InnerSubDBKey; + }; + outer: record { + canister: principal; + key: OuterSubDBKey; + }; + }; + }; +type OuterSubDBKey = nat; +type OuterPair = + record { + canister: OuterCanister; + key: OuterSubDBKey; + }; +type OuterCanister = + service { + createOuter: + (record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + part: principal; + }) -> + (record { + inner: record { + canister: principal; + key: InnerSubDBKey; + }; + outer: record { + canister: principal; + key: OuterSubDBKey; + }; + }); + deleteInner: (record { + innerKey: InnerSubDBKey; + sk: SK; + }) -> (); + deleteSubDBInner: (record {innerKey: InnerSubDBKey;}) -> (); + deleteSubDBOuter: (record {outerKey: OuterSubDBKey;}) -> (); + getByInner: (record { + innerKey: InnerSubDBKey; + sk: SK; + }) -> (opt AttributeValue) query; + getByOuter: (record { + outerKey: OuterSubDBKey; + sk: SK; + }) -> (opt AttributeValue); + getInner: (record {outerKey: OuterSubDBKey;}) -> + (opt record { + canister: principal; + key: InnerSubDBKey; + }) query; + getOuter: (GetByOuterPartitionKeyOptions) 
-> (opt AttributeValue); + getSubDBUserDataInner: (record {innerKey: InnerSubDBKey;}) -> (opt text); + getSubDBUserDataOuter: (GetUserDataOuterOptions) -> (opt text); + hasByInner: (record { + innerKey: InnerSubDBKey; + sk: SK; + }) -> (bool) query; + hasByOuter: (record { + outerKey: OuterSubDBKey; + sk: SK; + }) -> (bool); + hasSubDBByInner: (record {innerKey: InnerSubDBKey;}) -> (bool) query; + hasSubDBByOuter: (record {outerKey: OuterSubDBKey;}) -> (bool); + isOverflowed: () -> (bool) query; + putLocation: + (record { + innerCanister: principal; + newInnerSubDBKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }) -> (); + rawDeleteSubDB: (record {innerKey: InnerSubDBKey;}) -> (); + rawGetSubDB: (record {innerKey: InnerSubDBKey;}) -> + (opt record { + map: vec record { + SK; + AttributeValue; + }; + userData: text; + }) query; + rawInsertSubDB: + (record { + hardCap: opt nat; + innerKey: opt InnerSubDBKey; + map: vec record { + SK; + AttributeValue; + }; + userData: text; + }) -> (record {innerKey: InnerSubDBKey;}); + rawInsertSubDBAndSetOuter: + (record { + hardCap: opt nat; + keys: opt record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }; + map: vec record { + SK; + AttributeValue; + }; + userData: text; + }) -> (record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }); + scanLimitInner: + (record { + dir: Direction; + innerKey: InnerSubDBKey; + limit: nat; + lowerBound: SK; + upperBound: SK; + }) -> (ScanLimitResult) query; + scanLimitOuter: + (record { + dir: Direction; + limit: nat; + lowerBound: SK; + outerKey: OuterSubDBKey; + upperBound: SK; + }) -> (ScanLimitResult); + scanSubDBs: () -> + (vec + record { + OuterSubDBKey; + record { + canister: principal; + key: InnerSubDBKey; + }; + }) query; + startInsertingImpl: + (record { + innerKey: InnerSubDBKey; + sk: SK; + value: AttributeValue; + }) -> (); + subDBSizeByInner: (record {innerKey: InnerSubDBKey;}) -> (opt nat) query; + subDBSizeByOuter: (record {outerKey: OuterSubDBKey;}) -> 
(opt nat); + subDBSizeOuterImpl: (SubDBSizeOuterOptions) -> (opt nat); + superDBSize: () -> (nat) query; + }; +type Order = + record { + order: record { + OuterCanister; + OuterSubDBKey; + }; + reverse: record { + OuterCanister; + OuterSubDBKey; + }; + }; +type NacDBIndex = + service { + createPartition: () -> (principal); + createPartitionImpl: () -> (principal); + createSubDB: (vec nat8, record { + hardCap: opt nat; + userData: text; + }) -> + (record { + inner: record { + canister: principal; + key: InnerSubDBKey; + }; + outer: record { + canister: principal; + key: OuterSubDBKey; + }; + }); + delete: (vec nat8, + record { + outerCanister: principal; + outerKey: OuterSubDBKey; + sk: SK; + }) -> (); + deleteSubDB: (vec nat8, + record { + outerCanister: principal; + outerKey: OuterSubDBKey; + }) -> (); + getAllItemsStream: () -> (Order) query; + getCanisters: () -> (vec principal) query; + getOwners: () -> (vec principal) query; + init: (vec principal) -> (); + insert: (vec nat8, + record { + hardCap: opt nat; + outerCanister: principal; + outerKey: OuterSubDBKey; + sk: SK; + value: AttributeValue; + }) -> (Result); + setOwners: (vec principal) -> (); + upgradeCanistersInRange: (blob, nat, nat) -> (); + }; +type InnerSubDBKey = nat; +type GetUserDataOuterOptions = record {outer: OuterPair;}; +type GetByOuterPartitionKeyOptions = + record { + outer: OuterPair; + sk: SK; + }; +type Direction = + variant { + bwd; + fwd; + }; +type AttributeValuePrimitive = + variant { + "bool": bool; + float: float64; + "int": int; + "text": text; + }; +type AttributeValue = + variant { + arrayBool: vec bool; + arrayFloat: vec float64; + arrayInt: vec int; + arrayText: vec text; + "bool": bool; + float: float64; + "int": int; + "text": text; + tuple: vec AttributeValuePrimitive; + }; +service : () -> NacDBIndex diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did.d.ts b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did.d.ts new file mode 100644 
index 0000000000..1ab4b212c3 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did.d.ts @@ -0,0 +1,231 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export type AttributeValue = { 'int' : bigint } | + { 'float' : number } | + { 'tuple' : Array } | + { 'bool' : boolean } | + { 'text' : string } | + { 'arrayBool' : Array } | + { 'arrayText' : Array } | + { 'arrayInt' : Array } | + { 'arrayFloat' : Array }; +export type AttributeValuePrimitive = { 'int' : bigint } | + { 'float' : number } | + { 'bool' : boolean } | + { 'text' : string }; +export type Direction = { 'bwd' : null } | + { 'fwd' : null }; +export interface GetByOuterPartitionKeyOptions { + 'sk' : SK, + 'outer' : OuterPair, +} +export interface GetUserDataOuterOptions { 'outer' : OuterPair } +export type InnerSubDBKey = bigint; +export interface NacDBIndex { + 'createPartition' : ActorMethod<[], Principal>, + 'createPartitionImpl' : ActorMethod<[], Principal>, + 'createSubDB' : ActorMethod< + [Uint8Array | number[], { 'userData' : string, 'hardCap' : [] | [bigint] }], + { + 'outer' : { 'key' : OuterSubDBKey, 'canister' : Principal }, + 'inner' : { 'key' : InnerSubDBKey, 'canister' : Principal }, + } + >, + 'delete' : ActorMethod< + [ + Uint8Array | number[], + { 'sk' : SK, 'outerKey' : OuterSubDBKey, 'outerCanister' : Principal }, + ], + undefined + >, + 'deleteSubDB' : ActorMethod< + [ + Uint8Array | number[], + { 'outerKey' : OuterSubDBKey, 'outerCanister' : Principal }, + ], + undefined + >, + 'getAllItemsStream' : ActorMethod<[], Order>, + 'getCanisters' : ActorMethod<[], Array>, + 'getOwners' : ActorMethod<[], Array>, + 'init' : ActorMethod<[Array], undefined>, + 'insert' : ActorMethod< + [ + Uint8Array | number[], + { + 'sk' : SK, + 'value' : AttributeValue, + 'hardCap' : [] | [bigint], + 'outerKey' : OuterSubDBKey, + 'outerCanister' : Principal, + }, + ], + 
Result + >, + 'setOwners' : ActorMethod<[Array], undefined>, + 'upgradeCanistersInRange' : ActorMethod< + [Uint8Array | number[], bigint, bigint], + undefined + >, +} +export interface Order { + 'reverse' : [OuterCanister, OuterSubDBKey], + 'order' : [OuterCanister, OuterSubDBKey], +} +export interface OuterCanister { + 'createOuter' : ActorMethod< + [ + { + 'part' : Principal, + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }, + ], + { + 'outer' : { 'key' : OuterSubDBKey, 'canister' : Principal }, + 'inner' : { 'key' : InnerSubDBKey, 'canister' : Principal }, + } + >, + 'deleteInner' : ActorMethod< + [{ 'sk' : SK, 'innerKey' : InnerSubDBKey }], + undefined + >, + 'deleteSubDBInner' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], undefined>, + 'deleteSubDBOuter' : ActorMethod<[{ 'outerKey' : OuterSubDBKey }], undefined>, + 'getByInner' : ActorMethod< + [{ 'sk' : SK, 'innerKey' : InnerSubDBKey }], + [] | [AttributeValue] + >, + 'getByOuter' : ActorMethod< + [{ 'sk' : SK, 'outerKey' : OuterSubDBKey }], + [] | [AttributeValue] + >, + 'getInner' : ActorMethod< + [{ 'outerKey' : OuterSubDBKey }], + [] | [{ 'key' : InnerSubDBKey, 'canister' : Principal }] + >, + 'getOuter' : ActorMethod< + [GetByOuterPartitionKeyOptions], + [] | [AttributeValue] + >, + 'getSubDBUserDataInner' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [string] + >, + 'getSubDBUserDataOuter' : ActorMethod< + [GetUserDataOuterOptions], + [] | [string] + >, + 'hasByInner' : ActorMethod< + [{ 'sk' : SK, 'innerKey' : InnerSubDBKey }], + boolean + >, + 'hasByOuter' : ActorMethod< + [{ 'sk' : SK, 'outerKey' : OuterSubDBKey }], + boolean + >, + 'hasSubDBByInner' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], boolean>, + 'hasSubDBByOuter' : ActorMethod<[{ 'outerKey' : OuterSubDBKey }], boolean>, + 'isOverflowed' : ActorMethod<[], boolean>, + 'putLocation' : ActorMethod< + [ + { + 'newInnerSubDBKey' : InnerSubDBKey, + 'innerCanister' : Principal, + 'outerKey' : OuterSubDBKey, + }, + 
], + undefined + >, + 'rawDeleteSubDB' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], undefined>, + 'rawGetSubDB' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [{ 'map' : Array<[SK, AttributeValue]>, 'userData' : string }] + >, + 'rawInsertSubDB' : ActorMethod< + [ + { + 'map' : Array<[SK, AttributeValue]>, + 'userData' : string, + 'hardCap' : [] | [bigint], + 'innerKey' : [] | [InnerSubDBKey], + }, + ], + { 'innerKey' : InnerSubDBKey } + >, + 'rawInsertSubDBAndSetOuter' : ActorMethod< + [ + { + 'map' : Array<[SK, AttributeValue]>, + 'userData' : string, + 'keys' : [] | [ + { 'outerKey' : OuterSubDBKey, 'innerKey' : InnerSubDBKey } + ], + 'hardCap' : [] | [bigint], + }, + ], + { 'outerKey' : OuterSubDBKey, 'innerKey' : InnerSubDBKey } + >, + 'scanLimitInner' : ActorMethod< + [ + { + 'dir' : Direction, + 'lowerBound' : SK, + 'limit' : bigint, + 'upperBound' : SK, + 'innerKey' : InnerSubDBKey, + }, + ], + ScanLimitResult + >, + 'scanLimitOuter' : ActorMethod< + [ + { + 'dir' : Direction, + 'lowerBound' : SK, + 'limit' : bigint, + 'upperBound' : SK, + 'outerKey' : OuterSubDBKey, + }, + ], + ScanLimitResult + >, + 'scanSubDBs' : ActorMethod< + [], + Array<[OuterSubDBKey, { 'key' : InnerSubDBKey, 'canister' : Principal }]> + >, + 'startInsertingImpl' : ActorMethod< + [{ 'sk' : SK, 'value' : AttributeValue, 'innerKey' : InnerSubDBKey }], + undefined + >, + 'subDBSizeByInner' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [bigint] + >, + 'subDBSizeByOuter' : ActorMethod< + [{ 'outerKey' : OuterSubDBKey }], + [] | [bigint] + >, + 'subDBSizeOuterImpl' : ActorMethod<[SubDBSizeOuterOptions], [] | [bigint]>, + 'superDBSize' : ActorMethod<[], bigint>, +} +export interface OuterPair { 'key' : OuterSubDBKey, 'canister' : OuterCanister } +export type OuterSubDBKey = bigint; +export type Result = { + 'ok' : { + 'outer' : { 'key' : OuterSubDBKey, 'canister' : Principal }, + 'inner' : { 'key' : InnerSubDBKey, 'canister' : Principal }, + } + } | + { 'err' : 
string }; +export type SK = string; +export interface ScanLimitResult { + 'results' : Array<[string, AttributeValue]>, + 'nextKey' : [] | [string], +} +export interface SubDBSizeOuterOptions { 'outer' : OuterPair } +export interface _SERVICE extends NacDBIndex {} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did.js b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did.js new file mode 100644 index 0000000000..489051b0c3 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/NacDBIndex.did.js @@ -0,0 +1,353 @@ +export const idlFactory = ({ IDL }) => { + const OuterCanister = IDL.Rec(); + const OuterSubDBKey = IDL.Nat; + const InnerSubDBKey = IDL.Nat; + const SK = IDL.Text; + const AttributeValuePrimitive = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'bool' : IDL.Bool, + 'text' : IDL.Text, + }); + const AttributeValue = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'tuple' : IDL.Vec(AttributeValuePrimitive), + 'bool' : IDL.Bool, + 'text' : IDL.Text, + 'arrayBool' : IDL.Vec(IDL.Bool), + 'arrayText' : IDL.Vec(IDL.Text), + 'arrayInt' : IDL.Vec(IDL.Int), + 'arrayFloat' : IDL.Vec(IDL.Float64), + }); + const OuterPair = IDL.Record({ + 'key' : OuterSubDBKey, + 'canister' : OuterCanister, + }); + const GetByOuterPartitionKeyOptions = IDL.Record({ + 'sk' : SK, + 'outer' : OuterPair, + }); + const GetUserDataOuterOptions = IDL.Record({ 'outer' : OuterPair }); + const Direction = IDL.Variant({ 'bwd' : IDL.Null, 'fwd' : IDL.Null }); + const ScanLimitResult = IDL.Record({ + 'results' : IDL.Vec(IDL.Tuple(IDL.Text, AttributeValue)), + 'nextKey' : IDL.Opt(IDL.Text), + }); + const SubDBSizeOuterOptions = IDL.Record({ 'outer' : OuterPair }); + OuterCanister.fill( + IDL.Service({ + 'createOuter' : IDL.Func( + [ + IDL.Record({ + 'part' : IDL.Principal, + 'outerKey' : 
OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }), + ], + [ + IDL.Record({ + 'outer' : IDL.Record({ + 'key' : OuterSubDBKey, + 'canister' : IDL.Principal, + }), + 'inner' : IDL.Record({ + 'key' : InnerSubDBKey, + 'canister' : IDL.Principal, + }), + }), + ], + [], + ), + 'deleteInner' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'deleteSubDBInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'deleteSubDBOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [], + [], + ), + 'getByInner' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'innerKey' : InnerSubDBKey })], + [IDL.Opt(AttributeValue)], + ['query'], + ), + 'getByOuter' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'outerKey' : OuterSubDBKey })], + [IDL.Opt(AttributeValue)], + [], + ), + 'getInner' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [ + IDL.Opt( + IDL.Record({ 'key' : InnerSubDBKey, 'canister' : IDL.Principal }) + ), + ], + ['query'], + ), + 'getOuter' : IDL.Func( + [GetByOuterPartitionKeyOptions], + [IDL.Opt(AttributeValue)], + [], + ), + 'getSubDBUserDataInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Opt(IDL.Text)], + [], + ), + 'getSubDBUserDataOuter' : IDL.Func( + [GetUserDataOuterOptions], + [IDL.Opt(IDL.Text)], + [], + ), + 'hasByInner' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'innerKey' : InnerSubDBKey })], + [IDL.Bool], + ['query'], + ), + 'hasByOuter' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'outerKey' : OuterSubDBKey })], + [IDL.Bool], + [], + ), + 'hasSubDBByInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Bool], + ['query'], + ), + 'hasSubDBByOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [IDL.Bool], + [], + ), + 'isOverflowed' : IDL.Func([], [IDL.Bool], ['query']), + 'putLocation' : IDL.Func( + [ + IDL.Record({ + 'newInnerSubDBKey' : InnerSubDBKey, + 'innerCanister' : IDL.Principal, + 'outerKey' : OuterSubDBKey, + }), + ], + 
[], + [], + ), + 'rawDeleteSubDB' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'rawGetSubDB' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [ + IDL.Opt( + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK, AttributeValue)), + 'userData' : IDL.Text, + }) + ), + ], + ['query'], + ), + 'rawInsertSubDB' : IDL.Func( + [ + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK, AttributeValue)), + 'userData' : IDL.Text, + 'hardCap' : IDL.Opt(IDL.Nat), + 'innerKey' : IDL.Opt(InnerSubDBKey), + }), + ], + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + ), + 'rawInsertSubDBAndSetOuter' : IDL.Func( + [ + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK, AttributeValue)), + 'userData' : IDL.Text, + 'keys' : IDL.Opt( + IDL.Record({ + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }) + ), + 'hardCap' : IDL.Opt(IDL.Nat), + }), + ], + [ + IDL.Record({ + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }), + ], + [], + ), + 'scanLimitInner' : IDL.Func( + [ + IDL.Record({ + 'dir' : Direction, + 'lowerBound' : SK, + 'limit' : IDL.Nat, + 'upperBound' : SK, + 'innerKey' : InnerSubDBKey, + }), + ], + [ScanLimitResult], + ['query'], + ), + 'scanLimitOuter' : IDL.Func( + [ + IDL.Record({ + 'dir' : Direction, + 'lowerBound' : SK, + 'limit' : IDL.Nat, + 'upperBound' : SK, + 'outerKey' : OuterSubDBKey, + }), + ], + [ScanLimitResult], + [], + ), + 'scanSubDBs' : IDL.Func( + [], + [ + IDL.Vec( + IDL.Tuple( + OuterSubDBKey, + IDL.Record({ + 'key' : InnerSubDBKey, + 'canister' : IDL.Principal, + }), + ) + ), + ], + ['query'], + ), + 'startInsertingImpl' : IDL.Func( + [ + IDL.Record({ + 'sk' : SK, + 'value' : AttributeValue, + 'innerKey' : InnerSubDBKey, + }), + ], + [], + [], + ), + 'subDBSizeByInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Opt(IDL.Nat)], + ['query'], + ), + 'subDBSizeByOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [IDL.Opt(IDL.Nat)], + [], + ), + 'subDBSizeOuterImpl' : 
IDL.Func( + [SubDBSizeOuterOptions], + [IDL.Opt(IDL.Nat)], + [], + ), + 'superDBSize' : IDL.Func([], [IDL.Nat], ['query']), + }) + ); + const Order = IDL.Record({ + 'reverse' : IDL.Tuple(OuterCanister, OuterSubDBKey), + 'order' : IDL.Tuple(OuterCanister, OuterSubDBKey), + }); + const Result = IDL.Variant({ + 'ok' : IDL.Record({ + 'outer' : IDL.Record({ + 'key' : OuterSubDBKey, + 'canister' : IDL.Principal, + }), + 'inner' : IDL.Record({ + 'key' : InnerSubDBKey, + 'canister' : IDL.Principal, + }), + }), + 'err' : IDL.Text, + }); + const NacDBIndex = IDL.Service({ + 'createPartition' : IDL.Func([], [IDL.Principal], []), + 'createPartitionImpl' : IDL.Func([], [IDL.Principal], []), + 'createSubDB' : IDL.Func( + [ + IDL.Vec(IDL.Nat8), + IDL.Record({ 'userData' : IDL.Text, 'hardCap' : IDL.Opt(IDL.Nat) }), + ], + [ + IDL.Record({ + 'outer' : IDL.Record({ + 'key' : OuterSubDBKey, + 'canister' : IDL.Principal, + }), + 'inner' : IDL.Record({ + 'key' : InnerSubDBKey, + 'canister' : IDL.Principal, + }), + }), + ], + [], + ), + 'delete' : IDL.Func( + [ + IDL.Vec(IDL.Nat8), + IDL.Record({ + 'sk' : SK, + 'outerKey' : OuterSubDBKey, + 'outerCanister' : IDL.Principal, + }), + ], + [], + [], + ), + 'deleteSubDB' : IDL.Func( + [ + IDL.Vec(IDL.Nat8), + IDL.Record({ + 'outerKey' : OuterSubDBKey, + 'outerCanister' : IDL.Principal, + }), + ], + [], + [], + ), + 'getAllItemsStream' : IDL.Func([], [Order], ['query']), + 'getCanisters' : IDL.Func([], [IDL.Vec(IDL.Principal)], ['query']), + 'getOwners' : IDL.Func([], [IDL.Vec(IDL.Principal)], ['query']), + 'init' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'insert' : IDL.Func( + [ + IDL.Vec(IDL.Nat8), + IDL.Record({ + 'sk' : SK, + 'value' : AttributeValue, + 'hardCap' : IDL.Opt(IDL.Nat), + 'outerKey' : OuterSubDBKey, + 'outerCanister' : IDL.Principal, + }), + ], + [Result], + [], + ), + 'setOwners' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'upgradeCanistersInRange' : IDL.Func( + [IDL.Vec(IDL.Nat8), IDL.Nat, IDL.Nat], + [], + [], + 
), + }); + return NacDBIndex; +}; +export const init = ({ IDL }) => { return []; }; diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBIndex/index.d.ts b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/index.d.ts new file mode 100644 index 0000000000..68e33af83c --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './NacDBIndex.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. + * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. 
Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const NacDBIndex: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBIndex/index.js b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/index.js new file mode 100644 index 0000000000..56dfbd2a1a --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBIndex/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./NacDBIndex.did.js"; +export { idlFactory } from "./NacDBIndex.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_NACDBINDEX; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." + ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. 
Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const NacDBIndex = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did new file mode 100644 index 0000000000..0326886e47 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did @@ -0,0 +1,319 @@ +type SubDBSizeOuterOptions = record {outer: OuterPair;}; +type ScanLimitResult__1 = + record { + nextKey: opt text; + results: vec record { + text; + AttributeValue; + }; + }; +type ScanLimitResult = + record { + nextKey: opt text; + results: vec record { + text; + AttributeValue; + }; + }; +type SK = text; +type Partition = + service { + createOuter: + (record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + part: principal; + }) -> + (record { + inner: record { + canister: principal; + key: InnerSubDBKey; + }; + outer: record { + canister: principal; + key: OuterSubDBKey; + }; + }); + deleteInner: (record { + innerKey: InnerSubDBKey; + sk: SK; + }) -> (); + deleteSubDBInner: (record {innerKey: InnerSubDBKey;}) -> (); + deleteSubDBOuter: (record {outerKey: OuterSubDBKey;}) -> (); + getByInner: (record { + innerKey: InnerSubDBKey; + sk: SK; + }) -> (opt AttributeValue) query; + getByOuter: (record { + outerKey: OuterSubDBKey; + sk: SK; + }) -> (opt AttributeValue); + getInner: (record {outerKey: OuterSubDBKey;}) -> + (opt record { + canister: principal; + key: InnerSubDBKey; + }) query; + getOuter: (GetByOuterPartitionKeyOptions) -> (opt AttributeValue); + getOwners: () -> (vec principal) query; + getSubDBUserDataInner: (record {innerKey: InnerSubDBKey;}) -> (opt text); + getSubDBUserDataOuter: 
(GetUserDataOuterOptions) -> (opt text); + hasByInner: (record { + innerKey: InnerSubDBKey; + sk: SK; + }) -> (bool) query; + hasByOuter: (record { + outerKey: OuterSubDBKey; + sk: SK; + }) -> (bool); + hasSubDBByInner: (record {innerKey: InnerSubDBKey;}) -> (bool) query; + hasSubDBByOuter: (record {outerKey: OuterSubDBKey;}) -> (bool); + isOverflowed: () -> (bool) query; + putLocation: + (record { + innerCanister: principal; + newInnerSubDBKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }) -> (); + rawDeleteSubDB: (record {innerKey: InnerSubDBKey;}) -> (); + rawGetSubDB: (record {innerKey: InnerSubDBKey;}) -> + (opt record { + map: vec record { + SK; + AttributeValue; + }; + userData: text; + }) query; + rawInsertSubDB: + (record { + hardCap: opt nat; + innerKey: opt InnerSubDBKey; + map: vec record { + SK; + AttributeValue; + }; + userData: text; + }) -> (record {innerKey: InnerSubDBKey;}); + rawInsertSubDBAndSetOuter: + (record { + hardCap: opt nat; + keys: opt record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }; + map: vec record { + SK; + AttributeValue; + }; + userData: text; + }) -> (record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }); + scanLimitInner: + (record { + dir: Direction__1; + innerKey: InnerSubDBKey; + limit: nat; + lowerBound: SK; + upperBound: SK; + }) -> (ScanLimitResult__1) query; + scanLimitOuter: + (record { + dir: Direction__1; + limit: nat; + lowerBound: SK; + outerKey: OuterSubDBKey; + upperBound: SK; + }) -> (ScanLimitResult__1); + scanSubDBs: () -> + (vec + record { + OuterSubDBKey; + record { + canister: principal; + key: InnerSubDBKey; + }; + }) query; + setOwners: (vec principal) -> (); + startInsertingImpl: + (record { + innerKey: InnerSubDBKey; + sk: SK; + value: AttributeValue; + }) -> (); + subDBSizeByInner: (record {innerKey: InnerSubDBKey;}) -> (opt nat) query; + subDBSizeByOuter: (record {outerKey: OuterSubDBKey;}) -> (opt nat); + subDBSizeOuterImpl: (SubDBSizeOuterOptions) -> (opt nat); + 
superDBSize: () -> (nat) query; + }; +type OuterSubDBKey = nat; +type OuterPair = + record { + canister: OuterCanister; + key: OuterSubDBKey; + }; +type OuterCanister = + service { + createOuter: + (record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + part: principal; + }) -> + (record { + inner: record { + canister: principal; + key: InnerSubDBKey; + }; + outer: record { + canister: principal; + key: OuterSubDBKey; + }; + }); + deleteInner: (record { + innerKey: InnerSubDBKey; + sk: SK; + }) -> (); + deleteSubDBInner: (record {innerKey: InnerSubDBKey;}) -> (); + deleteSubDBOuter: (record {outerKey: OuterSubDBKey;}) -> (); + getByInner: (record { + innerKey: InnerSubDBKey; + sk: SK; + }) -> (opt AttributeValue) query; + getByOuter: (record { + outerKey: OuterSubDBKey; + sk: SK; + }) -> (opt AttributeValue); + getInner: (record {outerKey: OuterSubDBKey;}) -> + (opt record { + canister: principal; + key: InnerSubDBKey; + }) query; + getOuter: (GetByOuterPartitionKeyOptions) -> (opt AttributeValue); + getSubDBUserDataInner: (record {innerKey: InnerSubDBKey;}) -> (opt text); + getSubDBUserDataOuter: (GetUserDataOuterOptions) -> (opt text); + hasByInner: (record { + innerKey: InnerSubDBKey; + sk: SK; + }) -> (bool) query; + hasByOuter: (record { + outerKey: OuterSubDBKey; + sk: SK; + }) -> (bool); + hasSubDBByInner: (record {innerKey: InnerSubDBKey;}) -> (bool) query; + hasSubDBByOuter: (record {outerKey: OuterSubDBKey;}) -> (bool); + isOverflowed: () -> (bool) query; + putLocation: + (record { + innerCanister: principal; + newInnerSubDBKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }) -> (); + rawDeleteSubDB: (record {innerKey: InnerSubDBKey;}) -> (); + rawGetSubDB: (record {innerKey: InnerSubDBKey;}) -> + (opt record { + map: vec record { + SK; + AttributeValue; + }; + userData: text; + }) query; + rawInsertSubDB: + (record { + hardCap: opt nat; + innerKey: opt InnerSubDBKey; + map: vec record { + SK; + AttributeValue; + }; + userData: text; + }) -> 
(record {innerKey: InnerSubDBKey;}); + rawInsertSubDBAndSetOuter: + (record { + hardCap: opt nat; + keys: opt record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }; + map: vec record { + SK; + AttributeValue; + }; + userData: text; + }) -> (record { + innerKey: InnerSubDBKey; + outerKey: OuterSubDBKey; + }); + scanLimitInner: + (record { + dir: Direction; + innerKey: InnerSubDBKey; + limit: nat; + lowerBound: SK; + upperBound: SK; + }) -> (ScanLimitResult) query; + scanLimitOuter: + (record { + dir: Direction; + limit: nat; + lowerBound: SK; + outerKey: OuterSubDBKey; + upperBound: SK; + }) -> (ScanLimitResult); + scanSubDBs: () -> + (vec + record { + OuterSubDBKey; + record { + canister: principal; + key: InnerSubDBKey; + }; + }) query; + startInsertingImpl: + (record { + innerKey: InnerSubDBKey; + sk: SK; + value: AttributeValue; + }) -> (); + subDBSizeByInner: (record {innerKey: InnerSubDBKey;}) -> (opt nat) query; + subDBSizeByOuter: (record {outerKey: OuterSubDBKey;}) -> (opt nat); + subDBSizeOuterImpl: (SubDBSizeOuterOptions) -> (opt nat); + superDBSize: () -> (nat) query; + }; +type InnerSubDBKey = nat; +type GetUserDataOuterOptions = record {outer: OuterPair;}; +type GetByOuterPartitionKeyOptions = + record { + outer: OuterPair; + sk: SK; + }; +type Direction__1 = + variant { + bwd; + fwd; + }; +type Direction = + variant { + bwd; + fwd; + }; +type AttributeValuePrimitive = + variant { + "bool": bool; + float: float64; + "int": int; + "text": text; + }; +type AttributeValue = + variant { + arrayBool: vec bool; + arrayFloat: vec float64; + arrayInt: vec int; + arrayText: vec text; + "bool": bool; + float: float64; + "int": int; + "text": text; + tuple: vec AttributeValuePrimitive; + }; +service : (vec principal) -> Partition diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did.d.ts b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did.d.ts new file mode 100644 index 0000000000..767ae2f661 --- 
/dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did.d.ts @@ -0,0 +1,318 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export type AttributeValue = { 'int' : bigint } | + { 'float' : number } | + { 'tuple' : Array } | + { 'bool' : boolean } | + { 'text' : string } | + { 'arrayBool' : Array } | + { 'arrayText' : Array } | + { 'arrayInt' : Array } | + { 'arrayFloat' : Array }; +export type AttributeValuePrimitive = { 'int' : bigint } | + { 'float' : number } | + { 'bool' : boolean } | + { 'text' : string }; +export type Direction = { 'bwd' : null } | + { 'fwd' : null }; +export type Direction__1 = { 'bwd' : null } | + { 'fwd' : null }; +export interface GetByOuterPartitionKeyOptions { + 'sk' : SK, + 'outer' : OuterPair, +} +export interface GetUserDataOuterOptions { 'outer' : OuterPair } +export type InnerSubDBKey = bigint; +export interface OuterCanister { + 'createOuter' : ActorMethod< + [ + { + 'part' : Principal, + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }, + ], + { + 'outer' : { 'key' : OuterSubDBKey, 'canister' : Principal }, + 'inner' : { 'key' : InnerSubDBKey, 'canister' : Principal }, + } + >, + 'deleteInner' : ActorMethod< + [{ 'sk' : SK, 'innerKey' : InnerSubDBKey }], + undefined + >, + 'deleteSubDBInner' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], undefined>, + 'deleteSubDBOuter' : ActorMethod<[{ 'outerKey' : OuterSubDBKey }], undefined>, + 'getByInner' : ActorMethod< + [{ 'sk' : SK, 'innerKey' : InnerSubDBKey }], + [] | [AttributeValue] + >, + 'getByOuter' : ActorMethod< + [{ 'sk' : SK, 'outerKey' : OuterSubDBKey }], + [] | [AttributeValue] + >, + 'getInner' : ActorMethod< + [{ 'outerKey' : OuterSubDBKey }], + [] | [{ 'key' : InnerSubDBKey, 'canister' : Principal }] + >, + 'getOuter' : ActorMethod< + [GetByOuterPartitionKeyOptions], + [] | [AttributeValue] + >, + 'getSubDBUserDataInner' 
: ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [string] + >, + 'getSubDBUserDataOuter' : ActorMethod< + [GetUserDataOuterOptions], + [] | [string] + >, + 'hasByInner' : ActorMethod< + [{ 'sk' : SK, 'innerKey' : InnerSubDBKey }], + boolean + >, + 'hasByOuter' : ActorMethod< + [{ 'sk' : SK, 'outerKey' : OuterSubDBKey }], + boolean + >, + 'hasSubDBByInner' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], boolean>, + 'hasSubDBByOuter' : ActorMethod<[{ 'outerKey' : OuterSubDBKey }], boolean>, + 'isOverflowed' : ActorMethod<[], boolean>, + 'putLocation' : ActorMethod< + [ + { + 'newInnerSubDBKey' : InnerSubDBKey, + 'innerCanister' : Principal, + 'outerKey' : OuterSubDBKey, + }, + ], + undefined + >, + 'rawDeleteSubDB' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], undefined>, + 'rawGetSubDB' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [{ 'map' : Array<[SK, AttributeValue]>, 'userData' : string }] + >, + 'rawInsertSubDB' : ActorMethod< + [ + { + 'map' : Array<[SK, AttributeValue]>, + 'userData' : string, + 'hardCap' : [] | [bigint], + 'innerKey' : [] | [InnerSubDBKey], + }, + ], + { 'innerKey' : InnerSubDBKey } + >, + 'rawInsertSubDBAndSetOuter' : ActorMethod< + [ + { + 'map' : Array<[SK, AttributeValue]>, + 'userData' : string, + 'keys' : [] | [ + { 'outerKey' : OuterSubDBKey, 'innerKey' : InnerSubDBKey } + ], + 'hardCap' : [] | [bigint], + }, + ], + { 'outerKey' : OuterSubDBKey, 'innerKey' : InnerSubDBKey } + >, + 'scanLimitInner' : ActorMethod< + [ + { + 'dir' : Direction, + 'lowerBound' : SK, + 'limit' : bigint, + 'upperBound' : SK, + 'innerKey' : InnerSubDBKey, + }, + ], + ScanLimitResult + >, + 'scanLimitOuter' : ActorMethod< + [ + { + 'dir' : Direction, + 'lowerBound' : SK, + 'limit' : bigint, + 'upperBound' : SK, + 'outerKey' : OuterSubDBKey, + }, + ], + ScanLimitResult + >, + 'scanSubDBs' : ActorMethod< + [], + Array<[OuterSubDBKey, { 'key' : InnerSubDBKey, 'canister' : Principal }]> + >, + 'startInsertingImpl' : ActorMethod< + [{ 'sk' : 
SK, 'value' : AttributeValue, 'innerKey' : InnerSubDBKey }], + undefined + >, + 'subDBSizeByInner' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [bigint] + >, + 'subDBSizeByOuter' : ActorMethod< + [{ 'outerKey' : OuterSubDBKey }], + [] | [bigint] + >, + 'subDBSizeOuterImpl' : ActorMethod<[SubDBSizeOuterOptions], [] | [bigint]>, + 'superDBSize' : ActorMethod<[], bigint>, +} +export interface OuterPair { 'key' : OuterSubDBKey, 'canister' : OuterCanister } +export type OuterSubDBKey = bigint; +export interface Partition { + 'createOuter' : ActorMethod< + [ + { + 'part' : Principal, + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }, + ], + { + 'outer' : { 'key' : OuterSubDBKey, 'canister' : Principal }, + 'inner' : { 'key' : InnerSubDBKey, 'canister' : Principal }, + } + >, + 'deleteInner' : ActorMethod< + [{ 'sk' : SK, 'innerKey' : InnerSubDBKey }], + undefined + >, + 'deleteSubDBInner' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], undefined>, + 'deleteSubDBOuter' : ActorMethod<[{ 'outerKey' : OuterSubDBKey }], undefined>, + 'getByInner' : ActorMethod< + [{ 'sk' : SK, 'innerKey' : InnerSubDBKey }], + [] | [AttributeValue] + >, + 'getByOuter' : ActorMethod< + [{ 'sk' : SK, 'outerKey' : OuterSubDBKey }], + [] | [AttributeValue] + >, + 'getInner' : ActorMethod< + [{ 'outerKey' : OuterSubDBKey }], + [] | [{ 'key' : InnerSubDBKey, 'canister' : Principal }] + >, + 'getOuter' : ActorMethod< + [GetByOuterPartitionKeyOptions], + [] | [AttributeValue] + >, + 'getOwners' : ActorMethod<[], Array>, + 'getSubDBUserDataInner' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [string] + >, + 'getSubDBUserDataOuter' : ActorMethod< + [GetUserDataOuterOptions], + [] | [string] + >, + 'hasByInner' : ActorMethod< + [{ 'sk' : SK, 'innerKey' : InnerSubDBKey }], + boolean + >, + 'hasByOuter' : ActorMethod< + [{ 'sk' : SK, 'outerKey' : OuterSubDBKey }], + boolean + >, + 'hasSubDBByInner' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], boolean>, + 
'hasSubDBByOuter' : ActorMethod<[{ 'outerKey' : OuterSubDBKey }], boolean>, + 'isOverflowed' : ActorMethod<[], boolean>, + 'putLocation' : ActorMethod< + [ + { + 'newInnerSubDBKey' : InnerSubDBKey, + 'innerCanister' : Principal, + 'outerKey' : OuterSubDBKey, + }, + ], + undefined + >, + 'rawDeleteSubDB' : ActorMethod<[{ 'innerKey' : InnerSubDBKey }], undefined>, + 'rawGetSubDB' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [{ 'map' : Array<[SK, AttributeValue]>, 'userData' : string }] + >, + 'rawInsertSubDB' : ActorMethod< + [ + { + 'map' : Array<[SK, AttributeValue]>, + 'userData' : string, + 'hardCap' : [] | [bigint], + 'innerKey' : [] | [InnerSubDBKey], + }, + ], + { 'innerKey' : InnerSubDBKey } + >, + 'rawInsertSubDBAndSetOuter' : ActorMethod< + [ + { + 'map' : Array<[SK, AttributeValue]>, + 'userData' : string, + 'keys' : [] | [ + { 'outerKey' : OuterSubDBKey, 'innerKey' : InnerSubDBKey } + ], + 'hardCap' : [] | [bigint], + }, + ], + { 'outerKey' : OuterSubDBKey, 'innerKey' : InnerSubDBKey } + >, + 'scanLimitInner' : ActorMethod< + [ + { + 'dir' : Direction__1, + 'lowerBound' : SK, + 'limit' : bigint, + 'upperBound' : SK, + 'innerKey' : InnerSubDBKey, + }, + ], + ScanLimitResult__1 + >, + 'scanLimitOuter' : ActorMethod< + [ + { + 'dir' : Direction__1, + 'lowerBound' : SK, + 'limit' : bigint, + 'upperBound' : SK, + 'outerKey' : OuterSubDBKey, + }, + ], + ScanLimitResult__1 + >, + 'scanSubDBs' : ActorMethod< + [], + Array<[OuterSubDBKey, { 'key' : InnerSubDBKey, 'canister' : Principal }]> + >, + 'setOwners' : ActorMethod<[Array], undefined>, + 'startInsertingImpl' : ActorMethod< + [{ 'sk' : SK, 'value' : AttributeValue, 'innerKey' : InnerSubDBKey }], + undefined + >, + 'subDBSizeByInner' : ActorMethod< + [{ 'innerKey' : InnerSubDBKey }], + [] | [bigint] + >, + 'subDBSizeByOuter' : ActorMethod< + [{ 'outerKey' : OuterSubDBKey }], + [] | [bigint] + >, + 'subDBSizeOuterImpl' : ActorMethod<[SubDBSizeOuterOptions], [] | [bigint]>, + 'superDBSize' : 
ActorMethod<[], bigint>, +} +export type SK = string; +export interface ScanLimitResult { + 'results' : Array<[string, AttributeValue]>, + 'nextKey' : [] | [string], +} +export interface ScanLimitResult__1 { + 'results' : Array<[string, AttributeValue]>, + 'nextKey' : [] | [string], +} +export interface SubDBSizeOuterOptions { 'outer' : OuterPair } +export interface _SERVICE extends Partition {} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did.js b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did.js new file mode 100644 index 0000000000..507a73ae1f --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/NacDBPartition.did.js @@ -0,0 +1,488 @@ +export const idlFactory = ({ IDL }) => { + const GetByOuterPartitionKeyOptions = IDL.Rec(); + const OuterPair = IDL.Rec(); + const OuterSubDBKey = IDL.Nat; + const InnerSubDBKey = IDL.Nat; + const SK = IDL.Text; + const AttributeValuePrimitive = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'bool' : IDL.Bool, + 'text' : IDL.Text, + }); + const AttributeValue = IDL.Variant({ + 'int' : IDL.Int, + 'float' : IDL.Float64, + 'tuple' : IDL.Vec(AttributeValuePrimitive), + 'bool' : IDL.Bool, + 'text' : IDL.Text, + 'arrayBool' : IDL.Vec(IDL.Bool), + 'arrayText' : IDL.Vec(IDL.Text), + 'arrayInt' : IDL.Vec(IDL.Int), + 'arrayFloat' : IDL.Vec(IDL.Float64), + }); + const GetUserDataOuterOptions = IDL.Record({ 'outer' : OuterPair }); + const Direction = IDL.Variant({ 'bwd' : IDL.Null, 'fwd' : IDL.Null }); + const ScanLimitResult = IDL.Record({ + 'results' : IDL.Vec(IDL.Tuple(IDL.Text, AttributeValue)), + 'nextKey' : IDL.Opt(IDL.Text), + }); + const SubDBSizeOuterOptions = IDL.Record({ 'outer' : OuterPair }); + const OuterCanister = IDL.Service({ + 'createOuter' : IDL.Func( + [ + IDL.Record({ + 'part' : IDL.Principal, + 
'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }), + ], + [ + IDL.Record({ + 'outer' : IDL.Record({ + 'key' : OuterSubDBKey, + 'canister' : IDL.Principal, + }), + 'inner' : IDL.Record({ + 'key' : InnerSubDBKey, + 'canister' : IDL.Principal, + }), + }), + ], + [], + ), + 'deleteInner' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'deleteSubDBInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'deleteSubDBOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [], + [], + ), + 'getByInner' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'innerKey' : InnerSubDBKey })], + [IDL.Opt(AttributeValue)], + ['query'], + ), + 'getByOuter' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'outerKey' : OuterSubDBKey })], + [IDL.Opt(AttributeValue)], + [], + ), + 'getInner' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [ + IDL.Opt( + IDL.Record({ 'key' : InnerSubDBKey, 'canister' : IDL.Principal }) + ), + ], + ['query'], + ), + 'getOuter' : IDL.Func( + [GetByOuterPartitionKeyOptions], + [IDL.Opt(AttributeValue)], + [], + ), + 'getSubDBUserDataInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Opt(IDL.Text)], + [], + ), + 'getSubDBUserDataOuter' : IDL.Func( + [GetUserDataOuterOptions], + [IDL.Opt(IDL.Text)], + [], + ), + 'hasByInner' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'innerKey' : InnerSubDBKey })], + [IDL.Bool], + ['query'], + ), + 'hasByOuter' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'outerKey' : OuterSubDBKey })], + [IDL.Bool], + [], + ), + 'hasSubDBByInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Bool], + ['query'], + ), + 'hasSubDBByOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [IDL.Bool], + [], + ), + 'isOverflowed' : IDL.Func([], [IDL.Bool], ['query']), + 'putLocation' : IDL.Func( + [ + IDL.Record({ + 'newInnerSubDBKey' : InnerSubDBKey, + 'innerCanister' : IDL.Principal, + 'outerKey' : OuterSubDBKey, + 
}), + ], + [], + [], + ), + 'rawDeleteSubDB' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'rawGetSubDB' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [ + IDL.Opt( + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK, AttributeValue)), + 'userData' : IDL.Text, + }) + ), + ], + ['query'], + ), + 'rawInsertSubDB' : IDL.Func( + [ + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK, AttributeValue)), + 'userData' : IDL.Text, + 'hardCap' : IDL.Opt(IDL.Nat), + 'innerKey' : IDL.Opt(InnerSubDBKey), + }), + ], + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + ), + 'rawInsertSubDBAndSetOuter' : IDL.Func( + [ + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK, AttributeValue)), + 'userData' : IDL.Text, + 'keys' : IDL.Opt( + IDL.Record({ + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }) + ), + 'hardCap' : IDL.Opt(IDL.Nat), + }), + ], + [ + IDL.Record({ + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }), + ], + [], + ), + 'scanLimitInner' : IDL.Func( + [ + IDL.Record({ + 'dir' : Direction, + 'lowerBound' : SK, + 'limit' : IDL.Nat, + 'upperBound' : SK, + 'innerKey' : InnerSubDBKey, + }), + ], + [ScanLimitResult], + ['query'], + ), + 'scanLimitOuter' : IDL.Func( + [ + IDL.Record({ + 'dir' : Direction, + 'lowerBound' : SK, + 'limit' : IDL.Nat, + 'upperBound' : SK, + 'outerKey' : OuterSubDBKey, + }), + ], + [ScanLimitResult], + [], + ), + 'scanSubDBs' : IDL.Func( + [], + [ + IDL.Vec( + IDL.Tuple( + OuterSubDBKey, + IDL.Record({ 'key' : InnerSubDBKey, 'canister' : IDL.Principal }), + ) + ), + ], + ['query'], + ), + 'startInsertingImpl' : IDL.Func( + [ + IDL.Record({ + 'sk' : SK, + 'value' : AttributeValue, + 'innerKey' : InnerSubDBKey, + }), + ], + [], + [], + ), + 'subDBSizeByInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Opt(IDL.Nat)], + ['query'], + ), + 'subDBSizeByOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [IDL.Opt(IDL.Nat)], + [], + ), + 'subDBSizeOuterImpl' : 
IDL.Func( + [SubDBSizeOuterOptions], + [IDL.Opt(IDL.Nat)], + [], + ), + 'superDBSize' : IDL.Func([], [IDL.Nat], ['query']), + }); + OuterPair.fill( + IDL.Record({ 'key' : OuterSubDBKey, 'canister' : OuterCanister }) + ); + GetByOuterPartitionKeyOptions.fill( + IDL.Record({ 'sk' : SK, 'outer' : OuterPair }) + ); + const Direction__1 = IDL.Variant({ 'bwd' : IDL.Null, 'fwd' : IDL.Null }); + const ScanLimitResult__1 = IDL.Record({ + 'results' : IDL.Vec(IDL.Tuple(IDL.Text, AttributeValue)), + 'nextKey' : IDL.Opt(IDL.Text), + }); + const Partition = IDL.Service({ + 'createOuter' : IDL.Func( + [ + IDL.Record({ + 'part' : IDL.Principal, + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }), + ], + [ + IDL.Record({ + 'outer' : IDL.Record({ + 'key' : OuterSubDBKey, + 'canister' : IDL.Principal, + }), + 'inner' : IDL.Record({ + 'key' : InnerSubDBKey, + 'canister' : IDL.Principal, + }), + }), + ], + [], + ), + 'deleteInner' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'deleteSubDBInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'deleteSubDBOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [], + [], + ), + 'getByInner' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'innerKey' : InnerSubDBKey })], + [IDL.Opt(AttributeValue)], + ['query'], + ), + 'getByOuter' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'outerKey' : OuterSubDBKey })], + [IDL.Opt(AttributeValue)], + [], + ), + 'getInner' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [ + IDL.Opt( + IDL.Record({ 'key' : InnerSubDBKey, 'canister' : IDL.Principal }) + ), + ], + ['query'], + ), + 'getOuter' : IDL.Func( + [GetByOuterPartitionKeyOptions], + [IDL.Opt(AttributeValue)], + [], + ), + 'getOwners' : IDL.Func([], [IDL.Vec(IDL.Principal)], ['query']), + 'getSubDBUserDataInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Opt(IDL.Text)], + [], + ), + 'getSubDBUserDataOuter' : IDL.Func( + 
[GetUserDataOuterOptions], + [IDL.Opt(IDL.Text)], + [], + ), + 'hasByInner' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'innerKey' : InnerSubDBKey })], + [IDL.Bool], + ['query'], + ), + 'hasByOuter' : IDL.Func( + [IDL.Record({ 'sk' : SK, 'outerKey' : OuterSubDBKey })], + [IDL.Bool], + [], + ), + 'hasSubDBByInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Bool], + ['query'], + ), + 'hasSubDBByOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [IDL.Bool], + [], + ), + 'isOverflowed' : IDL.Func([], [IDL.Bool], ['query']), + 'putLocation' : IDL.Func( + [ + IDL.Record({ + 'newInnerSubDBKey' : InnerSubDBKey, + 'innerCanister' : IDL.Principal, + 'outerKey' : OuterSubDBKey, + }), + ], + [], + [], + ), + 'rawDeleteSubDB' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + [], + ), + 'rawGetSubDB' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [ + IDL.Opt( + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK, AttributeValue)), + 'userData' : IDL.Text, + }) + ), + ], + ['query'], + ), + 'rawInsertSubDB' : IDL.Func( + [ + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK, AttributeValue)), + 'userData' : IDL.Text, + 'hardCap' : IDL.Opt(IDL.Nat), + 'innerKey' : IDL.Opt(InnerSubDBKey), + }), + ], + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [], + ), + 'rawInsertSubDBAndSetOuter' : IDL.Func( + [ + IDL.Record({ + 'map' : IDL.Vec(IDL.Tuple(SK, AttributeValue)), + 'userData' : IDL.Text, + 'keys' : IDL.Opt( + IDL.Record({ + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }) + ), + 'hardCap' : IDL.Opt(IDL.Nat), + }), + ], + [ + IDL.Record({ + 'outerKey' : OuterSubDBKey, + 'innerKey' : InnerSubDBKey, + }), + ], + [], + ), + 'scanLimitInner' : IDL.Func( + [ + IDL.Record({ + 'dir' : Direction__1, + 'lowerBound' : SK, + 'limit' : IDL.Nat, + 'upperBound' : SK, + 'innerKey' : InnerSubDBKey, + }), + ], + [ScanLimitResult__1], + ['query'], + ), + 'scanLimitOuter' : IDL.Func( + [ + IDL.Record({ + 'dir' : Direction__1, + 
'lowerBound' : SK, + 'limit' : IDL.Nat, + 'upperBound' : SK, + 'outerKey' : OuterSubDBKey, + }), + ], + [ScanLimitResult__1], + [], + ), + 'scanSubDBs' : IDL.Func( + [], + [ + IDL.Vec( + IDL.Tuple( + OuterSubDBKey, + IDL.Record({ 'key' : InnerSubDBKey, 'canister' : IDL.Principal }), + ) + ), + ], + ['query'], + ), + 'setOwners' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'startInsertingImpl' : IDL.Func( + [ + IDL.Record({ + 'sk' : SK, + 'value' : AttributeValue, + 'innerKey' : InnerSubDBKey, + }), + ], + [], + [], + ), + 'subDBSizeByInner' : IDL.Func( + [IDL.Record({ 'innerKey' : InnerSubDBKey })], + [IDL.Opt(IDL.Nat)], + ['query'], + ), + 'subDBSizeByOuter' : IDL.Func( + [IDL.Record({ 'outerKey' : OuterSubDBKey })], + [IDL.Opt(IDL.Nat)], + [], + ), + 'subDBSizeOuterImpl' : IDL.Func( + [SubDBSizeOuterOptions], + [IDL.Opt(IDL.Nat)], + [], + ), + 'superDBSize' : IDL.Func([], [IDL.Nat], ['query']), + }); + return Partition; +}; +export const init = ({ IDL }) => { return [IDL.Vec(IDL.Principal)]; }; diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBPartition/index.d.ts b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/index.d.ts new file mode 100644 index 0000000000..18c5994010 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './NacDBPartition.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE 
interface of a canister. + * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const NacDBPartition: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/NacDBPartition/index.js b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/index.js new file mode 100644 index 0000000000..6a5b8cbcfe --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/NacDBPartition/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./NacDBPartition.did.js"; +export { idlFactory } from "./NacDBPartition.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_NACDBPARTITION; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. 
Ignoring agentOptions and proceeding with the provided agent." + ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const NacDBPartition = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/frontend/frontend.did b/e2e/assets/wrong_ids/src/declarations/frontend/frontend.did new file mode 100644 index 0000000000..51bb1a235b --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/frontend/frontend.did @@ -0,0 +1,262 @@ +type BatchId = nat; +type ChunkId = nat; +type Key = text; +type Time = int; + +type CreateAssetArguments = record { + key: Key; + content_type: text; + max_age: opt nat64; + headers: opt vec HeaderField; + enable_aliasing: opt bool; + allow_raw_access: opt bool; +}; + +// Add or change content for an asset, by content encoding +type SetAssetContentArguments = record { + key: Key; + content_encoding: text; + chunk_ids: vec ChunkId; + sha256: opt blob; +}; + +// Remove content for an asset, by content encoding +type UnsetAssetContentArguments = record { + key: Key; + content_encoding: text; +}; + +// Delete an asset +type DeleteAssetArguments = record { + key: Key; +}; + +// Reset everything +type ClearArguments = record {}; + +type BatchOperationKind = variant { + CreateAsset: CreateAssetArguments; + SetAssetContent: SetAssetContentArguments; + + SetAssetProperties: SetAssetPropertiesArguments; + + UnsetAssetContent: UnsetAssetContentArguments; + DeleteAsset: DeleteAssetArguments; + + Clear: ClearArguments; +}; + +type CommitBatchArguments = record { + batch_id: BatchId; + operations: 
vec BatchOperationKind +}; + +type CommitProposedBatchArguments = record { + batch_id: BatchId; + evidence: blob; +}; + +type ComputeEvidenceArguments = record { + batch_id: BatchId; + max_iterations: opt nat16 +}; + +type DeleteBatchArguments = record { + batch_id: BatchId; +}; + +type HeaderField = record { text; text; }; + +type HttpRequest = record { + method: text; + url: text; + headers: vec HeaderField; + body: blob; + certificate_version: opt nat16; +}; + +type HttpResponse = record { + status_code: nat16; + headers: vec HeaderField; + body: blob; + streaming_strategy: opt StreamingStrategy; +}; + +type StreamingCallbackHttpResponse = record { + body: blob; + token: opt StreamingCallbackToken; +}; + +type StreamingCallbackToken = record { + key: Key; + content_encoding: text; + index: nat; + sha256: opt blob; +}; + +type StreamingStrategy = variant { + Callback: record { + callback: func (StreamingCallbackToken) -> (opt StreamingCallbackHttpResponse) query; + token: StreamingCallbackToken; + }; +}; + +type SetAssetPropertiesArguments = record { + key: Key; + max_age: opt opt nat64; + headers: opt opt vec HeaderField; + allow_raw_access: opt opt bool; + is_aliased: opt opt bool; +}; + +type ConfigurationResponse = record { + max_batches: opt nat64; + max_chunks: opt nat64; + max_bytes: opt nat64; +}; + +type ConfigureArguments = record { + max_batches: opt opt nat64; + max_chunks: opt opt nat64; + max_bytes: opt opt nat64; +}; + +type Permission = variant { + Commit; + ManagePermissions; + Prepare; +}; + +type GrantPermission = record { + to_principal: principal; + permission: Permission; +}; +type RevokePermission = record { + of_principal: principal; + permission: Permission; +}; +type ListPermitted = record { permission: Permission }; + +type ValidationResult = variant { Ok : text; Err : text }; + +type AssetCanisterArgs = variant { + Init: InitArgs; + Upgrade: UpgradeArgs; +}; + +type InitArgs = record {}; + +type UpgradeArgs = record { + 
set_permissions: opt SetPermissions; +}; + +/// Sets the list of principals granted each permission. +type SetPermissions = record { + prepare: vec principal; + commit: vec principal; + manage_permissions: vec principal; +}; + +service: (asset_canister_args: opt AssetCanisterArgs) -> { + api_version: () -> (nat16) query; + + get: (record { + key: Key; + accept_encodings: vec text; + }) -> (record { + content: blob; // may be the entirety of the content, or just chunk index 0 + content_type: text; + content_encoding: text; + sha256: opt blob; // sha256 of entire asset encoding, calculated by dfx and passed in SetAssetContentArguments + total_length: nat; // all chunks except last have size == content.size() + }) query; + + // if get() returned chunks > 1, call this to retrieve them. + // chunks may or may not be split up at the same boundaries as presented to create_chunk(). + get_chunk: (record { + key: Key; + content_encoding: text; + index: nat; + sha256: opt blob; // sha256 of entire asset encoding, calculated by dfx and passed in SetAssetContentArguments + }) -> (record { content: blob }) query; + + list : (record {}) -> (vec record { + key: Key; + content_type: text; + encodings: vec record { + content_encoding: text; + sha256: opt blob; // sha256 of entire asset encoding, calculated by dfx and passed in SetAssetContentArguments + length: nat; // Size of this encoding's blob. Calculated when uploading assets. 
+ modified: Time; + }; + }) query; + + certified_tree : (record {}) -> (record { + certificate: blob; + tree: blob; + }) query; + + create_batch : (record {}) -> (record { batch_id: BatchId }); + + create_chunk: (record { batch_id: BatchId; content: blob }) -> (record { chunk_id: ChunkId }); + + // Perform all operations successfully, or reject + commit_batch: (CommitBatchArguments) -> (); + + // Save the batch operations for later commit + propose_commit_batch: (CommitBatchArguments) -> (); + + // Given a batch already proposed, perform all operations successfully, or reject + commit_proposed_batch: (CommitProposedBatchArguments) -> (); + + // Compute a hash over the CommitBatchArguments. Call until it returns Some(evidence). + compute_evidence: (ComputeEvidenceArguments) -> (opt blob); + + // Delete a batch that has been created, or proposed for commit, but not yet committed + delete_batch: (DeleteBatchArguments) -> (); + + create_asset: (CreateAssetArguments) -> (); + set_asset_content: (SetAssetContentArguments) -> (); + unset_asset_content: (UnsetAssetContentArguments) -> (); + + delete_asset: (DeleteAssetArguments) -> (); + + clear: (ClearArguments) -> (); + + // Single call to create an asset with content for a single content encoding that + // fits within the message ingress limit. 
+ store: (record { + key: Key; + content_type: text; + content_encoding: text; + content: blob; + sha256: opt blob + }) -> (); + + http_request: (request: HttpRequest) -> (HttpResponse) query; + http_request_streaming_callback: (token: StreamingCallbackToken) -> (opt StreamingCallbackHttpResponse) query; + + authorize: (principal) -> (); + deauthorize: (principal) -> (); + list_authorized: () -> (vec principal); + grant_permission: (GrantPermission) -> (); + revoke_permission: (RevokePermission) -> (); + list_permitted: (ListPermitted) -> (vec principal); + take_ownership: () -> (); + + get_asset_properties : (key: Key) -> (record { + max_age: opt nat64; + headers: opt vec HeaderField; + allow_raw_access: opt bool; + is_aliased: opt bool; } ) query; + set_asset_properties: (SetAssetPropertiesArguments) -> (); + + get_configuration: () -> (ConfigurationResponse); + configure: (ConfigureArguments) -> (); + + validate_grant_permission: (GrantPermission) -> (ValidationResult); + validate_revoke_permission: (RevokePermission) -> (ValidationResult); + validate_take_ownership: () -> (ValidationResult); + validate_commit_proposed_batch: (CommitProposedBatchArguments) -> (ValidationResult); + validate_configure: (ConfigureArguments) -> (ValidationResult); +} diff --git a/e2e/assets/wrong_ids/src/declarations/frontend/frontend.did.d.ts b/e2e/assets/wrong_ids/src/declarations/frontend/frontend.did.d.ts new file mode 100644 index 0000000000..ce964689f5 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/frontend/frontend.did.d.ts @@ -0,0 +1,240 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export type AssetCanisterArgs = { 'Upgrade' : UpgradeArgs } | + { 'Init' : InitArgs }; +export type BatchId = bigint; +export type BatchOperationKind = { + 'SetAssetProperties' : SetAssetPropertiesArguments + } | + { 'CreateAsset' : CreateAssetArguments } | + { 
'UnsetAssetContent' : UnsetAssetContentArguments } | + { 'DeleteAsset' : DeleteAssetArguments } | + { 'SetAssetContent' : SetAssetContentArguments } | + { 'Clear' : ClearArguments }; +export type ChunkId = bigint; +export type ClearArguments = {}; +export interface CommitBatchArguments { + 'batch_id' : BatchId, + 'operations' : Array, +} +export interface CommitProposedBatchArguments { + 'batch_id' : BatchId, + 'evidence' : Uint8Array | number[], +} +export interface ComputeEvidenceArguments { + 'batch_id' : BatchId, + 'max_iterations' : [] | [number], +} +export interface ConfigurationResponse { + 'max_batches' : [] | [bigint], + 'max_bytes' : [] | [bigint], + 'max_chunks' : [] | [bigint], +} +export interface ConfigureArguments { + 'max_batches' : [] | [[] | [bigint]], + 'max_bytes' : [] | [[] | [bigint]], + 'max_chunks' : [] | [[] | [bigint]], +} +export interface CreateAssetArguments { + 'key' : Key, + 'content_type' : string, + 'headers' : [] | [Array], + 'allow_raw_access' : [] | [boolean], + 'max_age' : [] | [bigint], + 'enable_aliasing' : [] | [boolean], +} +export interface DeleteAssetArguments { 'key' : Key } +export interface DeleteBatchArguments { 'batch_id' : BatchId } +export interface GrantPermission { + 'permission' : Permission, + 'to_principal' : Principal, +} +export type HeaderField = [string, string]; +export interface HttpRequest { + 'url' : string, + 'method' : string, + 'body' : Uint8Array | number[], + 'headers' : Array, + 'certificate_version' : [] | [number], +} +export interface HttpResponse { + 'body' : Uint8Array | number[], + 'headers' : Array, + 'streaming_strategy' : [] | [StreamingStrategy], + 'status_code' : number, +} +export type InitArgs = {}; +export type Key = string; +export interface ListPermitted { 'permission' : Permission } +export type Permission = { 'Prepare' : null } | + { 'ManagePermissions' : null } | + { 'Commit' : null }; +export interface RevokePermission { + 'permission' : Permission, + 'of_principal' : 
Principal, +} +export interface SetAssetContentArguments { + 'key' : Key, + 'sha256' : [] | [Uint8Array | number[]], + 'chunk_ids' : Array, + 'content_encoding' : string, +} +export interface SetAssetPropertiesArguments { + 'key' : Key, + 'headers' : [] | [[] | [Array]], + 'is_aliased' : [] | [[] | [boolean]], + 'allow_raw_access' : [] | [[] | [boolean]], + 'max_age' : [] | [[] | [bigint]], +} +export interface SetPermissions { + 'prepare' : Array, + 'commit' : Array, + 'manage_permissions' : Array, +} +export interface StreamingCallbackHttpResponse { + 'token' : [] | [StreamingCallbackToken], + 'body' : Uint8Array | number[], +} +export interface StreamingCallbackToken { + 'key' : Key, + 'sha256' : [] | [Uint8Array | number[]], + 'index' : bigint, + 'content_encoding' : string, +} +export type StreamingStrategy = { + 'Callback' : { + 'token' : StreamingCallbackToken, + 'callback' : [Principal, string], + } + }; +export type Time = bigint; +export interface UnsetAssetContentArguments { + 'key' : Key, + 'content_encoding' : string, +} +export interface UpgradeArgs { 'set_permissions' : [] | [SetPermissions] } +export type ValidationResult = { 'Ok' : string } | + { 'Err' : string }; +export interface _SERVICE { + 'api_version' : ActorMethod<[], number>, + 'authorize' : ActorMethod<[Principal], undefined>, + 'certified_tree' : ActorMethod< + [{}], + { 'certificate' : Uint8Array | number[], 'tree' : Uint8Array | number[] } + >, + 'clear' : ActorMethod<[ClearArguments], undefined>, + 'commit_batch' : ActorMethod<[CommitBatchArguments], undefined>, + 'commit_proposed_batch' : ActorMethod< + [CommitProposedBatchArguments], + undefined + >, + 'compute_evidence' : ActorMethod< + [ComputeEvidenceArguments], + [] | [Uint8Array | number[]] + >, + 'configure' : ActorMethod<[ConfigureArguments], undefined>, + 'create_asset' : ActorMethod<[CreateAssetArguments], undefined>, + 'create_batch' : ActorMethod<[{}], { 'batch_id' : BatchId }>, + 'create_chunk' : ActorMethod< + [{ 
'content' : Uint8Array | number[], 'batch_id' : BatchId }], + { 'chunk_id' : ChunkId } + >, + 'deauthorize' : ActorMethod<[Principal], undefined>, + 'delete_asset' : ActorMethod<[DeleteAssetArguments], undefined>, + 'delete_batch' : ActorMethod<[DeleteBatchArguments], undefined>, + 'get' : ActorMethod< + [{ 'key' : Key, 'accept_encodings' : Array }], + { + 'content' : Uint8Array | number[], + 'sha256' : [] | [Uint8Array | number[]], + 'content_type' : string, + 'content_encoding' : string, + 'total_length' : bigint, + } + >, + 'get_asset_properties' : ActorMethod< + [Key], + { + 'headers' : [] | [Array], + 'is_aliased' : [] | [boolean], + 'allow_raw_access' : [] | [boolean], + 'max_age' : [] | [bigint], + } + >, + 'get_chunk' : ActorMethod< + [ + { + 'key' : Key, + 'sha256' : [] | [Uint8Array | number[]], + 'index' : bigint, + 'content_encoding' : string, + }, + ], + { 'content' : Uint8Array | number[] } + >, + 'get_configuration' : ActorMethod<[], ConfigurationResponse>, + 'grant_permission' : ActorMethod<[GrantPermission], undefined>, + 'http_request' : ActorMethod<[HttpRequest], HttpResponse>, + 'http_request_streaming_callback' : ActorMethod< + [StreamingCallbackToken], + [] | [StreamingCallbackHttpResponse] + >, + 'list' : ActorMethod< + [{}], + Array< + { + 'key' : Key, + 'encodings' : Array< + { + 'modified' : Time, + 'sha256' : [] | [Uint8Array | number[]], + 'length' : bigint, + 'content_encoding' : string, + } + >, + 'content_type' : string, + } + > + >, + 'list_authorized' : ActorMethod<[], Array>, + 'list_permitted' : ActorMethod<[ListPermitted], Array>, + 'propose_commit_batch' : ActorMethod<[CommitBatchArguments], undefined>, + 'revoke_permission' : ActorMethod<[RevokePermission], undefined>, + 'set_asset_content' : ActorMethod<[SetAssetContentArguments], undefined>, + 'set_asset_properties' : ActorMethod< + [SetAssetPropertiesArguments], + undefined + >, + 'store' : ActorMethod< + [ + { + 'key' : Key, + 'content' : Uint8Array | number[], + 'sha256' : 
[] | [Uint8Array | number[]], + 'content_type' : string, + 'content_encoding' : string, + }, + ], + undefined + >, + 'take_ownership' : ActorMethod<[], undefined>, + 'unset_asset_content' : ActorMethod<[UnsetAssetContentArguments], undefined>, + 'validate_commit_proposed_batch' : ActorMethod< + [CommitProposedBatchArguments], + ValidationResult + >, + 'validate_configure' : ActorMethod<[ConfigureArguments], ValidationResult>, + 'validate_grant_permission' : ActorMethod< + [GrantPermission], + ValidationResult + >, + 'validate_revoke_permission' : ActorMethod< + [RevokePermission], + ValidationResult + >, + 'validate_take_ownership' : ActorMethod<[], ValidationResult>, +} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/frontend/frontend.did.js b/e2e/assets/wrong_ids/src/declarations/frontend/frontend.did.js new file mode 100644 index 0000000000..b68ea4a42a --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/frontend/frontend.did.js @@ -0,0 +1,286 @@ +export const idlFactory = ({ IDL }) => { + const SetPermissions = IDL.Record({ + 'prepare' : IDL.Vec(IDL.Principal), + 'commit' : IDL.Vec(IDL.Principal), + 'manage_permissions' : IDL.Vec(IDL.Principal), + }); + const UpgradeArgs = IDL.Record({ + 'set_permissions' : IDL.Opt(SetPermissions), + }); + const InitArgs = IDL.Record({}); + const AssetCanisterArgs = IDL.Variant({ + 'Upgrade' : UpgradeArgs, + 'Init' : InitArgs, + }); + const ClearArguments = IDL.Record({}); + const BatchId = IDL.Nat; + const Key = IDL.Text; + const HeaderField = IDL.Tuple(IDL.Text, IDL.Text); + const SetAssetPropertiesArguments = IDL.Record({ + 'key' : Key, + 'headers' : IDL.Opt(IDL.Opt(IDL.Vec(HeaderField))), + 'is_aliased' : IDL.Opt(IDL.Opt(IDL.Bool)), + 'allow_raw_access' : IDL.Opt(IDL.Opt(IDL.Bool)), + 'max_age' : IDL.Opt(IDL.Opt(IDL.Nat64)), + }); + const CreateAssetArguments = IDL.Record({ + 'key' : Key, + 
'content_type' : IDL.Text, + 'headers' : IDL.Opt(IDL.Vec(HeaderField)), + 'allow_raw_access' : IDL.Opt(IDL.Bool), + 'max_age' : IDL.Opt(IDL.Nat64), + 'enable_aliasing' : IDL.Opt(IDL.Bool), + }); + const UnsetAssetContentArguments = IDL.Record({ + 'key' : Key, + 'content_encoding' : IDL.Text, + }); + const DeleteAssetArguments = IDL.Record({ 'key' : Key }); + const ChunkId = IDL.Nat; + const SetAssetContentArguments = IDL.Record({ + 'key' : Key, + 'sha256' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'chunk_ids' : IDL.Vec(ChunkId), + 'content_encoding' : IDL.Text, + }); + const BatchOperationKind = IDL.Variant({ + 'SetAssetProperties' : SetAssetPropertiesArguments, + 'CreateAsset' : CreateAssetArguments, + 'UnsetAssetContent' : UnsetAssetContentArguments, + 'DeleteAsset' : DeleteAssetArguments, + 'SetAssetContent' : SetAssetContentArguments, + 'Clear' : ClearArguments, + }); + const CommitBatchArguments = IDL.Record({ + 'batch_id' : BatchId, + 'operations' : IDL.Vec(BatchOperationKind), + }); + const CommitProposedBatchArguments = IDL.Record({ + 'batch_id' : BatchId, + 'evidence' : IDL.Vec(IDL.Nat8), + }); + const ComputeEvidenceArguments = IDL.Record({ + 'batch_id' : BatchId, + 'max_iterations' : IDL.Opt(IDL.Nat16), + }); + const ConfigureArguments = IDL.Record({ + 'max_batches' : IDL.Opt(IDL.Opt(IDL.Nat64)), + 'max_bytes' : IDL.Opt(IDL.Opt(IDL.Nat64)), + 'max_chunks' : IDL.Opt(IDL.Opt(IDL.Nat64)), + }); + const DeleteBatchArguments = IDL.Record({ 'batch_id' : BatchId }); + const ConfigurationResponse = IDL.Record({ + 'max_batches' : IDL.Opt(IDL.Nat64), + 'max_bytes' : IDL.Opt(IDL.Nat64), + 'max_chunks' : IDL.Opt(IDL.Nat64), + }); + const Permission = IDL.Variant({ + 'Prepare' : IDL.Null, + 'ManagePermissions' : IDL.Null, + 'Commit' : IDL.Null, + }); + const GrantPermission = IDL.Record({ + 'permission' : Permission, + 'to_principal' : IDL.Principal, + }); + const HttpRequest = IDL.Record({ + 'url' : IDL.Text, + 'method' : IDL.Text, + 'body' : IDL.Vec(IDL.Nat8), + 'headers' : 
IDL.Vec(HeaderField), + 'certificate_version' : IDL.Opt(IDL.Nat16), + }); + const StreamingCallbackToken = IDL.Record({ + 'key' : Key, + 'sha256' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'index' : IDL.Nat, + 'content_encoding' : IDL.Text, + }); + const StreamingCallbackHttpResponse = IDL.Record({ + 'token' : IDL.Opt(StreamingCallbackToken), + 'body' : IDL.Vec(IDL.Nat8), + }); + const StreamingStrategy = IDL.Variant({ + 'Callback' : IDL.Record({ + 'token' : StreamingCallbackToken, + 'callback' : IDL.Func( + [StreamingCallbackToken], + [IDL.Opt(StreamingCallbackHttpResponse)], + ['query'], + ), + }), + }); + const HttpResponse = IDL.Record({ + 'body' : IDL.Vec(IDL.Nat8), + 'headers' : IDL.Vec(HeaderField), + 'streaming_strategy' : IDL.Opt(StreamingStrategy), + 'status_code' : IDL.Nat16, + }); + const Time = IDL.Int; + const ListPermitted = IDL.Record({ 'permission' : Permission }); + const RevokePermission = IDL.Record({ + 'permission' : Permission, + 'of_principal' : IDL.Principal, + }); + const ValidationResult = IDL.Variant({ 'Ok' : IDL.Text, 'Err' : IDL.Text }); + return IDL.Service({ + 'api_version' : IDL.Func([], [IDL.Nat16], ['query']), + 'authorize' : IDL.Func([IDL.Principal], [], []), + 'certified_tree' : IDL.Func( + [IDL.Record({})], + [ + IDL.Record({ + 'certificate' : IDL.Vec(IDL.Nat8), + 'tree' : IDL.Vec(IDL.Nat8), + }), + ], + ['query'], + ), + 'clear' : IDL.Func([ClearArguments], [], []), + 'commit_batch' : IDL.Func([CommitBatchArguments], [], []), + 'commit_proposed_batch' : IDL.Func([CommitProposedBatchArguments], [], []), + 'compute_evidence' : IDL.Func( + [ComputeEvidenceArguments], + [IDL.Opt(IDL.Vec(IDL.Nat8))], + [], + ), + 'configure' : IDL.Func([ConfigureArguments], [], []), + 'create_asset' : IDL.Func([CreateAssetArguments], [], []), + 'create_batch' : IDL.Func( + [IDL.Record({})], + [IDL.Record({ 'batch_id' : BatchId })], + [], + ), + 'create_chunk' : IDL.Func( + [IDL.Record({ 'content' : IDL.Vec(IDL.Nat8), 'batch_id' : BatchId })], + [IDL.Record({ 
'chunk_id' : ChunkId })], + [], + ), + 'deauthorize' : IDL.Func([IDL.Principal], [], []), + 'delete_asset' : IDL.Func([DeleteAssetArguments], [], []), + 'delete_batch' : IDL.Func([DeleteBatchArguments], [], []), + 'get' : IDL.Func( + [IDL.Record({ 'key' : Key, 'accept_encodings' : IDL.Vec(IDL.Text) })], + [ + IDL.Record({ + 'content' : IDL.Vec(IDL.Nat8), + 'sha256' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'content_type' : IDL.Text, + 'content_encoding' : IDL.Text, + 'total_length' : IDL.Nat, + }), + ], + ['query'], + ), + 'get_asset_properties' : IDL.Func( + [Key], + [ + IDL.Record({ + 'headers' : IDL.Opt(IDL.Vec(HeaderField)), + 'is_aliased' : IDL.Opt(IDL.Bool), + 'allow_raw_access' : IDL.Opt(IDL.Bool), + 'max_age' : IDL.Opt(IDL.Nat64), + }), + ], + ['query'], + ), + 'get_chunk' : IDL.Func( + [ + IDL.Record({ + 'key' : Key, + 'sha256' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'index' : IDL.Nat, + 'content_encoding' : IDL.Text, + }), + ], + [IDL.Record({ 'content' : IDL.Vec(IDL.Nat8) })], + ['query'], + ), + 'get_configuration' : IDL.Func([], [ConfigurationResponse], []), + 'grant_permission' : IDL.Func([GrantPermission], [], []), + 'http_request' : IDL.Func([HttpRequest], [HttpResponse], ['query']), + 'http_request_streaming_callback' : IDL.Func( + [StreamingCallbackToken], + [IDL.Opt(StreamingCallbackHttpResponse)], + ['query'], + ), + 'list' : IDL.Func( + [IDL.Record({})], + [ + IDL.Vec( + IDL.Record({ + 'key' : Key, + 'encodings' : IDL.Vec( + IDL.Record({ + 'modified' : Time, + 'sha256' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'length' : IDL.Nat, + 'content_encoding' : IDL.Text, + }) + ), + 'content_type' : IDL.Text, + }) + ), + ], + ['query'], + ), + 'list_authorized' : IDL.Func([], [IDL.Vec(IDL.Principal)], []), + 'list_permitted' : IDL.Func([ListPermitted], [IDL.Vec(IDL.Principal)], []), + 'propose_commit_batch' : IDL.Func([CommitBatchArguments], [], []), + 'revoke_permission' : IDL.Func([RevokePermission], [], []), + 'set_asset_content' : IDL.Func([SetAssetContentArguments], [], []), 
+ 'set_asset_properties' : IDL.Func([SetAssetPropertiesArguments], [], []), + 'store' : IDL.Func( + [ + IDL.Record({ + 'key' : Key, + 'content' : IDL.Vec(IDL.Nat8), + 'sha256' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'content_type' : IDL.Text, + 'content_encoding' : IDL.Text, + }), + ], + [], + [], + ), + 'take_ownership' : IDL.Func([], [], []), + 'unset_asset_content' : IDL.Func([UnsetAssetContentArguments], [], []), + 'validate_commit_proposed_batch' : IDL.Func( + [CommitProposedBatchArguments], + [ValidationResult], + [], + ), + 'validate_configure' : IDL.Func( + [ConfigureArguments], + [ValidationResult], + [], + ), + 'validate_grant_permission' : IDL.Func( + [GrantPermission], + [ValidationResult], + [], + ), + 'validate_revoke_permission' : IDL.Func( + [RevokePermission], + [ValidationResult], + [], + ), + 'validate_take_ownership' : IDL.Func([], [ValidationResult], []), + }); +}; +export const init = ({ IDL }) => { + const SetPermissions = IDL.Record({ + 'prepare' : IDL.Vec(IDL.Principal), + 'commit' : IDL.Vec(IDL.Principal), + 'manage_permissions' : IDL.Vec(IDL.Principal), + }); + const UpgradeArgs = IDL.Record({ + 'set_permissions' : IDL.Opt(SetPermissions), + }); + const InitArgs = IDL.Record({}); + const AssetCanisterArgs = IDL.Variant({ + 'Upgrade' : UpgradeArgs, + 'Init' : InitArgs, + }); + return [IDL.Opt(AssetCanisterArgs)]; +}; diff --git a/e2e/assets/wrong_ids/src/declarations/frontend/index.d.ts b/e2e/assets/wrong_ids/src/declarations/frontend/index.d.ts new file mode 100644 index 0000000000..b550743587 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/frontend/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './frontend.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare 
interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. + * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const frontend: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/frontend/index.js b/e2e/assets/wrong_ids/src/declarations/frontend/index.js new file mode 100644 index 0000000000..c5e4ab186b --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/frontend/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./frontend.did.js"; +export { idlFactory } from "./frontend.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_FRONTEND; + 
+export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." + ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const frontend = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did b/e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did new file mode 100644 index 0000000000..d24eac5527 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did @@ -0,0 +1,5 @@ +service : { + verify_ecdsa : (eth_address : text, message : text, signature : text) -> (bool) query; + erc721_owner_of : (network : text, contract_address : text, token_id : nat64) -> (text); + erc1155_balance_of : (network : text, contract_address : text, owner_address : text, token_id : nat64) -> (nat); +} diff --git a/e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did.d.ts b/e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did.d.ts new file mode 100644 index 0000000000..7f7838c546 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did.d.ts @@ -0,0 +1,11 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export interface _SERVICE { + 'erc1155_balance_of' : ActorMethod<[string, string, string, bigint], bigint>, 
+ 'erc721_owner_of' : ActorMethod<[string, string, bigint], string>, + 'verify_ecdsa' : ActorMethod<[string, string, string], boolean>, +} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did.js b/e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did.js new file mode 100644 index 0000000000..0b63e313c0 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/ic_eth/ic_eth.did.js @@ -0,0 +1,20 @@ +export const idlFactory = ({ IDL }) => { + return IDL.Service({ + 'erc1155_balance_of' : IDL.Func( + [IDL.Text, IDL.Text, IDL.Text, IDL.Nat64], + [IDL.Nat], + [], + ), + 'erc721_owner_of' : IDL.Func( + [IDL.Text, IDL.Text, IDL.Nat64], + [IDL.Text], + [], + ), + 'verify_ecdsa' : IDL.Func( + [IDL.Text, IDL.Text, IDL.Text], + [IDL.Bool], + ['query'], + ), + }); +}; +export const init = ({ IDL }) => { return []; }; diff --git a/e2e/assets/wrong_ids/src/declarations/ic_eth/index.d.ts b/e2e/assets/wrong_ids/src/declarations/ic_eth/index.d.ts new file mode 100644 index 0000000000..89f4f30d30 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/ic_eth/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './ic_eth.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. 
+ * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const ic_eth: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/ic_eth/index.js b/e2e/assets/wrong_ids/src/declarations/ic_eth/index.js new file mode 100644 index 0000000000..dbd89d9538 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/ic_eth/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./ic_eth.did.js"; +export { idlFactory } from "./ic_eth.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_IC_ETH; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." 
+ ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const ic_eth = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/internet_identity/index.d.ts b/e2e/assets/wrong_ids/src/declarations/internet_identity/index.d.ts new file mode 100644 index 0000000000..da7d676665 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/internet_identity/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './internet_identity.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. + * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. 
Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const internet_identity: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/internet_identity/index.js b/e2e/assets/wrong_ids/src/declarations/internet_identity/index.js new file mode 100644 index 0000000000..98fbcdd417 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/internet_identity/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./internet_identity.did.js"; +export { idlFactory } from "./internet_identity.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_INTERNET_IDENTITY; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." + ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. 
Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const internet_identity = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did b/e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did new file mode 100644 index 0000000000..dd46471214 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did @@ -0,0 +1,620 @@ +type UserNumber = nat64; +type PublicKey = blob; +type CredentialId = blob; +type DeviceKey = PublicKey; +type UserKey = PublicKey; +type SessionKey = PublicKey; +type FrontendHostname = text; +type Timestamp = nat64; + +type HeaderField = record { + text; + text; +}; + +type HttpRequest = record { + method: text; + url: text; + headers: vec HeaderField; + body: blob; + certificate_version: opt nat16; +}; + +type HttpResponse = record { + status_code: nat16; + headers: vec HeaderField; + body: blob; + upgrade : opt bool; + streaming_strategy: opt StreamingStrategy; +}; + +type StreamingCallbackHttpResponse = record { + body: blob; + token: opt Token; +}; + +type Token = record {}; + +type StreamingStrategy = variant { + Callback: record { + callback: func (Token) -> (StreamingCallbackHttpResponse) query; + token: Token; + }; +}; + +type Purpose = variant { + recovery; + authentication; +}; + +type KeyType = variant { + unknown; + platform; + cross_platform; + seed_phrase; + browser_storage_key; +}; + +// This describes whether a device is "protected" or not. +// When protected, a device can only be updated or removed if the +// user is authenticated with that very device. 
+type DeviceProtection = variant { + protected; + unprotected; +}; + +type Challenge = record { + png_base64: text; + challenge_key: ChallengeKey; +}; + +type DeviceData = record { + pubkey : DeviceKey; + alias : text; + credential_id : opt CredentialId; + purpose: Purpose; + key_type: KeyType; + protection: DeviceProtection; + origin: opt text; + // Metadata map for additional device information. + // + // Note: some fields above will be moved to the metadata map in the future. + // All field names of `DeviceData` (such as 'alias', 'origin, etc.) are + // reserved and cannot be written. + // In addition, the keys "usage" and "authenticator_attachment" are reserved as well. + metadata: opt MetadataMap; +}; + +// The same as `DeviceData` but with the `last_usage` field. +// This field cannot be written, hence the separate type. +type DeviceWithUsage = record { + pubkey : DeviceKey; + alias : text; + credential_id : opt CredentialId; + purpose: Purpose; + key_type: KeyType; + protection: DeviceProtection; + origin: opt text; + last_usage: opt Timestamp; + metadata: opt MetadataMap; +}; + +// Map with some variants for the value type. +// Note, due to the Candid mapping this must be a tuple type thus we cannot name the fields `key` and `value`. +type MetadataMap = vec record { + text; + variant { map : MetadataMap; string : text; bytes : vec nat8 }; +}; + +type RegisterResponse = variant { + // A new user was successfully registered. + registered: record { + user_number: UserNumber; + }; + // No more registrations are possible in this instance of the II service canister. + canister_full; + // The challenge was not successful. + bad_challenge; +}; + +type AddTentativeDeviceResponse = variant { + // The device was tentatively added. 
+ added_tentatively: record { + verification_code: text; + // Expiration date, in nanos since the epoch + device_registration_timeout: Timestamp; + }; + // Device registration mode is off, either due to timeout or because it was never enabled. + device_registration_mode_off; + // There is another device already added tentatively + another_device_tentatively_added; +}; + +type VerifyTentativeDeviceResponse = variant { + // The device was successfully verified. + verified; + // Wrong verification code entered. Retry with correct code. + wrong_code: record { + retries_left: nat8 + }; + // Device registration mode is off, either due to timeout or because it was never enabled. + device_registration_mode_off; + // There is no tentative device to be verified. + no_device_to_verify; +}; + +type Delegation = record { + pubkey: PublicKey; + expiration: Timestamp; + targets: opt vec principal; +}; + +type SignedDelegation = record { + delegation: Delegation; + signature: blob; +}; + +type GetDelegationResponse = variant { + // The signed delegation was successfully retrieved. + signed_delegation: SignedDelegation; + + // The signature is not ready. Maybe retry by calling `prepare_delegation` + no_such_delegation +}; + +type InternetIdentityStats = record { + users_registered: nat64; + storage_layout_version: nat8; + assigned_user_number_range: record { + nat64; + nat64; + }; + archive_info: ArchiveInfo; + canister_creation_cycles_cost: nat64; + max_num_latest_delegation_origins: nat64; + latest_delegation_origins: vec FrontendHostname +}; + +// Configuration parameters related to the archive. +type ArchiveConfig = record { + // The allowed module hash of the archive canister. + // Changing this parameter does _not_ deploy the archive, but enable archive deployments with the + // corresponding wasm module. + module_hash : blob; + // Buffered archive entries limit. 
If reached, II will stop accepting new anchor operations + // until the buffered operations are acknowledged by the archive. + entries_buffer_limit: nat64; + // The maximum number of entries to be transferred to the archive per call. + entries_fetch_limit: nat16; + // Polling interval to fetch new entries from II (in nanoseconds). + // Changes to this parameter will only take effect after an archive deployment. + polling_interval_ns: nat64; +}; + +// Information about the archive. +type ArchiveInfo = record { + // Canister id of the archive or empty if no archive has been deployed yet. + archive_canister : opt principal; + // Configuration parameters related to the II archive. + archive_config: opt ArchiveConfig; +}; + +// Rate limit configuration. +// Currently only used for `register`. +type RateLimitConfig = record { + // Time it takes (in ns) for a rate limiting token to be replenished. + time_per_token_ns : nat64; + // How many tokens are at most generated (to accommodate peaks). + max_tokens: nat64; +}; + +// Init arguments of II which can be supplied on install and upgrade. +// Setting a value to null keeps the previous value. +type InternetIdentityInit = record { + // Set lowest and highest anchor + assigned_user_number_range : opt record { + nat64; + nat64; + }; + // Configuration parameters related to the II archive. + // Note: some parameters changes (like the polling interval) will only take effect after an archive deployment. + // See ArchiveConfig for details. + archive_config: opt ArchiveConfig; + // Set the amounts of cycles sent with the create canister message. + // This is configurable because in the staging environment cycles are required. + // The canister creation cost on mainnet is currently 100'000'000'000 cycles. If this value is higher thant the + // canister creation cost, the newly created canister will keep extra cycles. + canister_creation_cycles_cost : opt nat64; + // Rate limit for the `register` call. 
+ register_rate_limit : opt RateLimitConfig; + // Maximum number of latest delegation origins to track. + // Default: 1000 + max_num_latest_delegation_origins : opt nat64; + // Maximum number of inflight captchas. + // Default: 500 + max_inflight_captchas: opt nat64; +}; + +type ChallengeKey = text; + +type ChallengeResult = record { + key : ChallengeKey; + chars : text; +}; +type CaptchaResult = ChallengeResult; + +// Extra information about registration status for new devices +type DeviceRegistrationInfo = record { + // If present, the user has tentatively added a new device. This + // new device needs to be verified (see relevant endpoint) before + // 'expiration'. + tentative_device : opt DeviceData; + // The timestamp at which the anchor will turn off registration mode + // (and the tentative device will be forgotten, if any, and if not verified) + expiration: Timestamp; +}; + +// Information about the anchor +type IdentityAnchorInfo = record { + // All devices that can authenticate to this anchor + devices : vec DeviceWithUsage; + // Device registration status used when adding devices, see DeviceRegistrationInfo + device_registration: opt DeviceRegistrationInfo; +}; + +type AnchorCredentials = record { + credentials : vec WebAuthnCredential; + recovery_credentials : vec WebAuthnCredential; + recovery_phrases: vec PublicKey; +}; + +type WebAuthnCredential = record { + credential_id : CredentialId; + pubkey: PublicKey; +}; + +type DeployArchiveResult = variant { + // The archive was deployed successfully and the supplied wasm module has been installed. The principal of the archive + // canister is returned. + success: principal; + // Initial archive creation is already in progress. + creation_in_progress; + // Archive deployment failed. An error description is returned. 
+ failed: text; +}; + +type BufferedArchiveEntry = record { + anchor_number: UserNumber; + timestamp: Timestamp; + sequence_number: nat64; + entry: blob; +}; + +// API V2 specific types +// WARNING: These type are experimental and may change in the future. + +type IdentityNumber = nat64; + +// Map with some variants for the value type. +// Note, due to the Candid mapping this must be a tuple type thus we cannot name the fields `key` and `value`. +type MetadataMapV2 = vec record { + text; + variant { Map : MetadataMapV2; String : text; Bytes : vec nat8 }; +}; + +// Authentication method using WebAuthn signatures +// See https://www.w3.org/TR/webauthn-2/ +// This is a separate type because WebAuthn requires to also store +// the credential id (in addition to the public key). +type WebAuthn = record { + credential_id: CredentialId; + pubkey: PublicKey; +}; + +// Authentication method using generic signatures +// See https://internetcomputer.org/docs/current/references/ic-interface-spec/#signatures for +// supported signature schemes. +type PublicKeyAuthn = record { + pubkey: PublicKey; +}; + +// The authentication methods currently supported by II. +type AuthnMethod = variant { + WebAuthn: WebAuthn; + PubKey: PublicKeyAuthn; +}; + +// This describes whether an authentication method is "protected" or not. +// When protected, a authentication method can only be updated or removed if the +// user is authenticated with that very authentication method. 
+type AuthnMethodProtection = variant { + Protected; + Unprotected; +}; + +type AuthnMethodPurpose = variant { + Recovery; + Authentication; +}; + +type AuthnMethodSecuritySettings = record { + protection: AuthnMethodProtection; + purpose: AuthnMethodPurpose; +}; + +type AuthnMethodData = record { + authn_method: AuthnMethod; + security_settings: AuthnMethodSecuritySettings; + // contains the following fields of the DeviceWithUsage type: + // - alias + // - origin + // - authenticator_attachment: data taken from key_type and reduced to "platform", "cross_platform" or absent on migration + // - usage: data taken from key_type and reduced to "recovery_phrase", "browser_storage_key" or absent on migration + // Note: for compatibility reasons with the v1 API, the entries above (if present) + // must be of the `String` variant. This restriction may be lifted in the future. + metadata: MetadataMapV2; + last_authentication: opt Timestamp; +}; + +// Extra information about registration status for new authentication methods +type AuthnMethodRegistrationInfo = record { + // If present, the user has registered a new authentication method. This + // new authentication needs to be verified before 'expiration' in order to + // be added to the identity. + authn_method : opt AuthnMethodData; + // The timestamp at which the identity will turn off registration mode + // (and the authentication method will be forgotten, if any, and if not verified) + expiration: Timestamp; +}; + +type AuthnMethodConfirmationCode = record { + confirmation_code: text; + expiration: Timestamp; +}; + +type AuthnMethodRegisterError = variant { + // Authentication method registration mode is off, either due to timeout or because it was never enabled. + RegistrationModeOff; + // There is another authentication method already registered that needs to be confirmed first. + RegistrationAlreadyInProgress; + // The metadata of the provided authentication method contains invalid entries. 
+ InvalidMetadata: text; +}; + +type AuthnMethodConfirmationError = variant { + // Wrong confirmation code entered. Retry with correct code. + WrongCode: record { + retries_left: nat8 + }; + // Authentication method registration mode is off, either due to timeout or because it was never enabled. + RegistrationModeOff; + // There is no registered authentication method to be confirmed. + NoAuthnMethodToConfirm; +}; + +type IdentityAuthnInfo = record { + authn_methods: vec AuthnMethod; + recovery_authn_methods: vec AuthnMethod; +}; + +type IdentityInfo = record { + authn_methods: vec AuthnMethodData; + authn_method_registration: opt AuthnMethodRegistrationInfo; + // Authentication method independent metadata + metadata: MetadataMapV2; +}; + +type IdentityInfoError = variant { + /// The principal is not authorized to call this method with the given arguments. + Unauthorized: principal; + /// Internal canister error. See the error message for details. + InternalCanisterError: text; +}; + + + +type IdentityRegisterError = variant { + // No more registrations are possible in this instance of the II service canister. + CanisterFull; + // The captcha check was not successful. + BadCaptcha; + // The metadata of the provided authentication method contains invalid entries. + InvalidMetadata: text; +}; + +type AuthnMethodAddError = variant { + InvalidMetadata: text; +}; + +type AuthnMethodReplaceError = variant { + InvalidMetadata: text; + // No authentication method found with the given public key. + AuthnMethodNotFound; +}; + +type AuthnMethodMetadataReplaceError = variant { + InvalidMetadata: text; + /// No authentication method found with the given public key. + AuthnMethodNotFound; +}; + +type AuthnMethodSecuritySettingsReplaceError = variant { + /// No authentication method found with the given public key. + AuthnMethodNotFound; +}; + +type IdentityMetadataReplaceError = variant { + /// The principal is not authorized to call this method with the given arguments. 
+ Unauthorized: principal; + /// The identity including the new metadata exceeds the maximum allowed size. + StorageSpaceExceeded: record {space_available: nat64; space_required: nat64}; + /// Internal canister error. See the error message for details. + InternalCanisterError: text; +}; + +type PrepareIdAliasRequest = record { + /// Origin of the issuer in the attribute sharing flow. + issuer : FrontendHostname; + /// Origin of the relying party in the attribute sharing flow. + relying_party : FrontendHostname; + /// Identity for which the IdAlias should be generated. + identity_number : IdentityNumber; +}; + +type PrepareIdAliasError = variant { + /// The principal is not authorized to call this method with the given arguments. + Unauthorized: principal; + /// Internal canister error. See the error message for details. + InternalCanisterError: text; +}; + +/// The prepared id alias contains two (still unsigned) credentials in JWT format, +/// certifying the id alias for the issuer resp. the relying party. +type PreparedIdAlias = record { + rp_id_alias_jwt : text; + issuer_id_alias_jwt : text; + canister_sig_pk_der : PublicKey; +}; + +/// The request to retrieve the actual signed id alias credentials. +/// The field values should be equal to the values of corresponding +/// fields from the preceding `PrepareIdAliasRequest` and `PrepareIdAliasResponse`. +type GetIdAliasRequest = record { + rp_id_alias_jwt : text; + issuer : FrontendHostname; + issuer_id_alias_jwt : text; + relying_party : FrontendHostname; + identity_number : IdentityNumber; +}; + +type GetIdAliasError = variant { + /// The principal is not authorized to call this method with the given arguments. + Unauthorized: principal; + /// The credential(s) are not available: may be expired or not prepared yet (call prepare_id_alias to prepare). + NoSuchCredentials : text; + /// Internal canister error. See the error message for details. 
+ InternalCanisterError: text; +}; + +/// The signed id alias credentials for each involved party. +type IdAliasCredentials = record { + rp_id_alias_credential : SignedIdAlias; + issuer_id_alias_credential : SignedIdAlias; +}; + +type SignedIdAlias = record { + credential_jws : text; + id_alias : principal; + id_dapp : principal; +}; + +service : (opt InternetIdentityInit) -> { + init_salt: () -> (); + create_challenge : () -> (Challenge); + register : (DeviceData, ChallengeResult, opt principal) -> (RegisterResponse); + add : (UserNumber, DeviceData) -> (); + update : (UserNumber, DeviceKey, DeviceData) -> (); + // Atomically replace device matching the device key with the new device data + replace : (UserNumber, DeviceKey, DeviceData) -> (); + remove : (UserNumber, DeviceKey) -> (); + // Returns all devices of the user (authentication and recovery) but no information about device registrations. + // Note: Clears out the 'alias' fields on the devices. Use 'get_anchor_info' to obtain the full information. + // Deprecated: Use 'get_anchor_credentials' instead. 
+ lookup : (UserNumber) -> (vec DeviceData) query; + get_anchor_credentials : (UserNumber) -> (AnchorCredentials) query; + get_anchor_info : (UserNumber) -> (IdentityAnchorInfo); + get_principal : (UserNumber, FrontendHostname) -> (principal) query; + stats : () -> (InternetIdentityStats) query; + + enter_device_registration_mode : (UserNumber) -> (Timestamp); + exit_device_registration_mode : (UserNumber) -> (); + add_tentative_device : (UserNumber, DeviceData) -> (AddTentativeDeviceResponse); + verify_tentative_device : (UserNumber, verification_code: text) -> (VerifyTentativeDeviceResponse); + + prepare_delegation : (UserNumber, FrontendHostname, SessionKey, maxTimeToLive : opt nat64) -> (UserKey, Timestamp); + get_delegation: (UserNumber, FrontendHostname, SessionKey, Timestamp) -> (GetDelegationResponse) query; + + http_request: (request: HttpRequest) -> (HttpResponse) query; + http_request_update: (request: HttpRequest) -> (HttpResponse); + + deploy_archive: (wasm: blob) -> (DeployArchiveResult); + /// Returns a batch of entries _sorted by sequence number_ to be archived. + /// This is an update call because the archive information _must_ be certified. + /// Only callable by this IIs archive canister. + fetch_entries: () -> (vec BufferedArchiveEntry); + acknowledge_entries: (sequence_number: nat64) -> (); + + // V2 API + // WARNING: The following methods are experimental and may change in the future. + + // Creates a new captcha. The solution needs to be submitted using the + // `identity_register` call. + captcha_create: () -> (variant {Ok: Challenge; Err;}); + + // Registers a new identity with the given authn_method. + // A valid captcha solution to a previously generated captcha (using create_captcha) must be provided. + // The sender needs to match the supplied authn_method. 
+ identity_register: (AuthnMethodData, CaptchaResult, opt principal) -> (variant {Ok: IdentityNumber; Err: IdentityRegisterError;}); + + // Returns information about the authentication methods of the identity with the given number. + // Only returns the minimal information required for authentication without exposing any metadata such as aliases. + identity_authn_info: (IdentityNumber) -> (variant {Ok: IdentityAuthnInfo; Err;}) query; + + // Returns information about the identity with the given number. + // Requires authentication. + identity_info: (IdentityNumber) -> (variant {Ok: IdentityInfo; Err: IdentityInfoError;}); + + // Replaces the authentication method independent metadata map. + // The existing metadata map will be overwritten. + // Requires authentication. + identity_metadata_replace: (IdentityNumber, MetadataMapV2) -> (variant {Ok; Err: IdentityMetadataReplaceError;}); + + // Adds a new authentication method to the identity. + // Requires authentication. + authn_method_add: (IdentityNumber, AuthnMethodData) -> (variant {Ok; Err: AuthnMethodAddError;}); + + // Atomically replaces the authentication method matching the supplied public key with the new authentication method + // provided. + // Requires authentication. + authn_method_replace: (IdentityNumber, PublicKey, AuthnMethodData) -> (variant {Ok; Err: AuthnMethodReplaceError;}); + + // Replaces the authentication method metadata map. + // The existing metadata map will be overwritten. + // Requires authentication. + authn_method_metadata_replace: (IdentityNumber, PublicKey, MetadataMapV2) -> (variant {Ok; Err: AuthnMethodMetadataReplaceError;}); + + // Replaces the authentication method security settings. + // The existing security settings will be overwritten. + // Requires authentication. 
+ authn_method_security_settings_replace: (IdentityNumber, PublicKey, AuthnMethodSecuritySettings) -> (variant {Ok; Err: AuthnMethodSecuritySettingsReplaceError;}); + + // Removes the authentication method associated with the public key from the identity. + // Requires authentication. + authn_method_remove: (IdentityNumber, PublicKey) -> (variant {Ok; Err;}); + + // Enters the authentication method registration mode for the identity. + // In this mode, a new authentication method can be registered, which then needs to be + // confirmed before it can be used for authentication on this identity. + // The registration mode is automatically exited after the returned expiration timestamp. + // Requires authentication. + authn_method_registration_mode_enter : (IdentityNumber) -> (variant {Ok: record { expiration: Timestamp; }; Err;}); + + // Exits the authentication method registration mode for the identity. + // Requires authentication. + authn_method_registration_mode_exit : (IdentityNumber) -> (variant {Ok; Err;}); + + // Registers a new authentication method to the identity. + // This authentication method needs to be confirmed before it can be used for authentication on this identity. + authn_method_register: (IdentityNumber, AuthnMethodData) -> (variant {Ok: AuthnMethodConfirmationCode; Err: AuthnMethodRegisterError;}); + + // Confirms a previously registered authentication method. + // On successful confirmation, the authentication method is permanently added to the identity and can + // subsequently be used for authentication for that identity. + // Requires authentication. + authn_method_confirm: (IdentityNumber, confirmation_code: text) -> (variant {Ok; Err: AuthnMethodConfirmationError;}); + + // Attribute Sharing MVP API + // The methods below are used to generate ID-alias credentials during attribute sharing flow. 
+ prepare_id_alias : (PrepareIdAliasRequest) -> (variant {Ok: PreparedIdAlias; Err: PrepareIdAliasError;}); + get_id_alias : (GetIdAliasRequest) -> (variant {Ok: IdAliasCredentials; Err: GetIdAliasError;}) query; +} diff --git a/e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did.d.ts b/e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did.d.ts new file mode 100644 index 0000000000..6429646323 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did.d.ts @@ -0,0 +1,392 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export type AddTentativeDeviceResponse = { + 'device_registration_mode_off' : null + } | + { 'another_device_tentatively_added' : null } | + { + 'added_tentatively' : { + 'verification_code' : string, + 'device_registration_timeout' : Timestamp, + } + }; +export interface AnchorCredentials { + 'recovery_phrases' : Array, + 'credentials' : Array, + 'recovery_credentials' : Array, +} +export interface ArchiveConfig { + 'polling_interval_ns' : bigint, + 'entries_buffer_limit' : bigint, + 'module_hash' : Uint8Array | number[], + 'entries_fetch_limit' : number, +} +export interface ArchiveInfo { + 'archive_config' : [] | [ArchiveConfig], + 'archive_canister' : [] | [Principal], +} +export type AuthnMethod = { 'PubKey' : PublicKeyAuthn } | + { 'WebAuthn' : WebAuthn }; +export type AuthnMethodAddError = { 'InvalidMetadata' : string }; +export interface AuthnMethodConfirmationCode { + 'confirmation_code' : string, + 'expiration' : Timestamp, +} +export type AuthnMethodConfirmationError = { 'RegistrationModeOff' : null } | + { 'NoAuthnMethodToConfirm' : null } | + { 'WrongCode' : { 'retries_left' : number } }; +export interface AuthnMethodData { + 'security_settings' : AuthnMethodSecuritySettings, + 'metadata' : MetadataMapV2, + 'last_authentication' : [] | 
[Timestamp], + 'authn_method' : AuthnMethod, +} +export type AuthnMethodMetadataReplaceError = { 'AuthnMethodNotFound' : null } | + { 'InvalidMetadata' : string }; +export type AuthnMethodProtection = { 'Protected' : null } | + { 'Unprotected' : null }; +export type AuthnMethodPurpose = { 'Recovery' : null } | + { 'Authentication' : null }; +export type AuthnMethodRegisterError = { 'RegistrationModeOff' : null } | + { 'RegistrationAlreadyInProgress' : null } | + { 'InvalidMetadata' : string }; +export interface AuthnMethodRegistrationInfo { + 'expiration' : Timestamp, + 'authn_method' : [] | [AuthnMethodData], +} +export type AuthnMethodReplaceError = { 'AuthnMethodNotFound' : null } | + { 'InvalidMetadata' : string }; +export interface AuthnMethodSecuritySettings { + 'protection' : AuthnMethodProtection, + 'purpose' : AuthnMethodPurpose, +} +export type AuthnMethodSecuritySettingsReplaceError = { + 'AuthnMethodNotFound' : null + }; +export interface BufferedArchiveEntry { + 'sequence_number' : bigint, + 'entry' : Uint8Array | number[], + 'anchor_number' : UserNumber, + 'timestamp' : Timestamp, +} +export type CaptchaResult = ChallengeResult; +export interface Challenge { + 'png_base64' : string, + 'challenge_key' : ChallengeKey, +} +export type ChallengeKey = string; +export interface ChallengeResult { 'key' : ChallengeKey, 'chars' : string } +export type CredentialId = Uint8Array | number[]; +export interface Delegation { + 'pubkey' : PublicKey, + 'targets' : [] | [Array], + 'expiration' : Timestamp, +} +export type DeployArchiveResult = { 'creation_in_progress' : null } | + { 'success' : Principal } | + { 'failed' : string }; +export interface DeviceData { + 'alias' : string, + 'metadata' : [] | [MetadataMap], + 'origin' : [] | [string], + 'protection' : DeviceProtection, + 'pubkey' : DeviceKey, + 'key_type' : KeyType, + 'purpose' : Purpose, + 'credential_id' : [] | [CredentialId], +} +export type DeviceKey = PublicKey; +export type DeviceProtection = { 
'unprotected' : null } | + { 'protected' : null }; +export interface DeviceRegistrationInfo { + 'tentative_device' : [] | [DeviceData], + 'expiration' : Timestamp, +} +export interface DeviceWithUsage { + 'alias' : string, + 'last_usage' : [] | [Timestamp], + 'metadata' : [] | [MetadataMap], + 'origin' : [] | [string], + 'protection' : DeviceProtection, + 'pubkey' : DeviceKey, + 'key_type' : KeyType, + 'purpose' : Purpose, + 'credential_id' : [] | [CredentialId], +} +export type FrontendHostname = string; +export type GetDelegationResponse = { 'no_such_delegation' : null } | + { 'signed_delegation' : SignedDelegation }; +export type GetIdAliasError = { 'InternalCanisterError' : string } | + { 'Unauthorized' : Principal } | + { 'NoSuchCredentials' : string }; +export interface GetIdAliasRequest { + 'rp_id_alias_jwt' : string, + 'issuer' : FrontendHostname, + 'issuer_id_alias_jwt' : string, + 'relying_party' : FrontendHostname, + 'identity_number' : IdentityNumber, +} +export type HeaderField = [string, string]; +export interface HttpRequest { + 'url' : string, + 'method' : string, + 'body' : Uint8Array | number[], + 'headers' : Array, + 'certificate_version' : [] | [number], +} +export interface HttpResponse { + 'body' : Uint8Array | number[], + 'headers' : Array, + 'upgrade' : [] | [boolean], + 'streaming_strategy' : [] | [StreamingStrategy], + 'status_code' : number, +} +export interface IdAliasCredentials { + 'rp_id_alias_credential' : SignedIdAlias, + 'issuer_id_alias_credential' : SignedIdAlias, +} +export interface IdentityAnchorInfo { + 'devices' : Array, + 'device_registration' : [] | [DeviceRegistrationInfo], +} +export interface IdentityAuthnInfo { + 'authn_methods' : Array, + 'recovery_authn_methods' : Array, +} +export interface IdentityInfo { + 'authn_methods' : Array, + 'metadata' : MetadataMapV2, + 'authn_method_registration' : [] | [AuthnMethodRegistrationInfo], +} +export type IdentityInfoError = { 'InternalCanisterError' : string } | + { 
'Unauthorized' : Principal }; +export type IdentityMetadataReplaceError = { + 'InternalCanisterError' : string + } | + { 'Unauthorized' : Principal } | + { + 'StorageSpaceExceeded' : { + 'space_required' : bigint, + 'space_available' : bigint, + } + }; +export type IdentityNumber = bigint; +export type IdentityRegisterError = { 'BadCaptcha' : null } | + { 'CanisterFull' : null } | + { 'InvalidMetadata' : string }; +export interface InternetIdentityInit { + 'max_num_latest_delegation_origins' : [] | [bigint], + 'assigned_user_number_range' : [] | [[bigint, bigint]], + 'max_inflight_captchas' : [] | [bigint], + 'archive_config' : [] | [ArchiveConfig], + 'canister_creation_cycles_cost' : [] | [bigint], + 'register_rate_limit' : [] | [RateLimitConfig], +} +export interface InternetIdentityStats { + 'storage_layout_version' : number, + 'users_registered' : bigint, + 'max_num_latest_delegation_origins' : bigint, + 'assigned_user_number_range' : [bigint, bigint], + 'latest_delegation_origins' : Array, + 'archive_info' : ArchiveInfo, + 'canister_creation_cycles_cost' : bigint, +} +export type KeyType = { 'platform' : null } | + { 'seed_phrase' : null } | + { 'cross_platform' : null } | + { 'unknown' : null } | + { 'browser_storage_key' : null }; +export type MetadataMap = Array< + [ + string, + { 'map' : MetadataMap } | + { 'string' : string } | + { 'bytes' : Uint8Array | number[] }, + ] +>; +export type MetadataMapV2 = Array< + [ + string, + { 'Map' : MetadataMapV2 } | + { 'String' : string } | + { 'Bytes' : Uint8Array | number[] }, + ] +>; +export type PrepareIdAliasError = { 'InternalCanisterError' : string } | + { 'Unauthorized' : Principal }; +export interface PrepareIdAliasRequest { + 'issuer' : FrontendHostname, + 'relying_party' : FrontendHostname, + 'identity_number' : IdentityNumber, +} +export interface PreparedIdAlias { + 'rp_id_alias_jwt' : string, + 'issuer_id_alias_jwt' : string, + 'canister_sig_pk_der' : PublicKey, +} +export type PublicKey = Uint8Array | 
number[]; +export interface PublicKeyAuthn { 'pubkey' : PublicKey } +export type Purpose = { 'authentication' : null } | + { 'recovery' : null }; +export interface RateLimitConfig { + 'max_tokens' : bigint, + 'time_per_token_ns' : bigint, +} +export type RegisterResponse = { 'bad_challenge' : null } | + { 'canister_full' : null } | + { 'registered' : { 'user_number' : UserNumber } }; +export type SessionKey = PublicKey; +export interface SignedDelegation { + 'signature' : Uint8Array | number[], + 'delegation' : Delegation, +} +export interface SignedIdAlias { + 'credential_jws' : string, + 'id_alias' : Principal, + 'id_dapp' : Principal, +} +export interface StreamingCallbackHttpResponse { + 'token' : [] | [Token], + 'body' : Uint8Array | number[], +} +export type StreamingStrategy = { + 'Callback' : { 'token' : Token, 'callback' : [Principal, string] } + }; +export type Timestamp = bigint; +export type Token = {}; +export type UserKey = PublicKey; +export type UserNumber = bigint; +export type VerifyTentativeDeviceResponse = { + 'device_registration_mode_off' : null + } | + { 'verified' : null } | + { 'wrong_code' : { 'retries_left' : number } } | + { 'no_device_to_verify' : null }; +export interface WebAuthn { + 'pubkey' : PublicKey, + 'credential_id' : CredentialId, +} +export interface WebAuthnCredential { + 'pubkey' : PublicKey, + 'credential_id' : CredentialId, +} +export interface _SERVICE { + 'acknowledge_entries' : ActorMethod<[bigint], undefined>, + 'add' : ActorMethod<[UserNumber, DeviceData], undefined>, + 'add_tentative_device' : ActorMethod< + [UserNumber, DeviceData], + AddTentativeDeviceResponse + >, + 'authn_method_add' : ActorMethod< + [IdentityNumber, AuthnMethodData], + { 'Ok' : null } | + { 'Err' : AuthnMethodAddError } + >, + 'authn_method_confirm' : ActorMethod< + [IdentityNumber, string], + { 'Ok' : null } | + { 'Err' : AuthnMethodConfirmationError } + >, + 'authn_method_metadata_replace' : ActorMethod< + [IdentityNumber, PublicKey, 
MetadataMapV2], + { 'Ok' : null } | + { 'Err' : AuthnMethodMetadataReplaceError } + >, + 'authn_method_register' : ActorMethod< + [IdentityNumber, AuthnMethodData], + { 'Ok' : AuthnMethodConfirmationCode } | + { 'Err' : AuthnMethodRegisterError } + >, + 'authn_method_registration_mode_enter' : ActorMethod< + [IdentityNumber], + { 'Ok' : { 'expiration' : Timestamp } } | + { 'Err' : null } + >, + 'authn_method_registration_mode_exit' : ActorMethod< + [IdentityNumber], + { 'Ok' : null } | + { 'Err' : null } + >, + 'authn_method_remove' : ActorMethod< + [IdentityNumber, PublicKey], + { 'Ok' : null } | + { 'Err' : null } + >, + 'authn_method_replace' : ActorMethod< + [IdentityNumber, PublicKey, AuthnMethodData], + { 'Ok' : null } | + { 'Err' : AuthnMethodReplaceError } + >, + 'authn_method_security_settings_replace' : ActorMethod< + [IdentityNumber, PublicKey, AuthnMethodSecuritySettings], + { 'Ok' : null } | + { 'Err' : AuthnMethodSecuritySettingsReplaceError } + >, + 'captcha_create' : ActorMethod<[], { 'Ok' : Challenge } | { 'Err' : null }>, + 'create_challenge' : ActorMethod<[], Challenge>, + 'deploy_archive' : ActorMethod<[Uint8Array | number[]], DeployArchiveResult>, + 'enter_device_registration_mode' : ActorMethod<[UserNumber], Timestamp>, + 'exit_device_registration_mode' : ActorMethod<[UserNumber], undefined>, + 'fetch_entries' : ActorMethod<[], Array>, + 'get_anchor_credentials' : ActorMethod<[UserNumber], AnchorCredentials>, + 'get_anchor_info' : ActorMethod<[UserNumber], IdentityAnchorInfo>, + 'get_delegation' : ActorMethod< + [UserNumber, FrontendHostname, SessionKey, Timestamp], + GetDelegationResponse + >, + 'get_id_alias' : ActorMethod< + [GetIdAliasRequest], + { 'Ok' : IdAliasCredentials } | + { 'Err' : GetIdAliasError } + >, + 'get_principal' : ActorMethod<[UserNumber, FrontendHostname], Principal>, + 'http_request' : ActorMethod<[HttpRequest], HttpResponse>, + 'http_request_update' : ActorMethod<[HttpRequest], HttpResponse>, + 'identity_authn_info' : 
ActorMethod< + [IdentityNumber], + { 'Ok' : IdentityAuthnInfo } | + { 'Err' : null } + >, + 'identity_info' : ActorMethod< + [IdentityNumber], + { 'Ok' : IdentityInfo } | + { 'Err' : IdentityInfoError } + >, + 'identity_metadata_replace' : ActorMethod< + [IdentityNumber, MetadataMapV2], + { 'Ok' : null } | + { 'Err' : IdentityMetadataReplaceError } + >, + 'identity_register' : ActorMethod< + [AuthnMethodData, CaptchaResult, [] | [Principal]], + { 'Ok' : IdentityNumber } | + { 'Err' : IdentityRegisterError } + >, + 'init_salt' : ActorMethod<[], undefined>, + 'lookup' : ActorMethod<[UserNumber], Array>, + 'prepare_delegation' : ActorMethod< + [UserNumber, FrontendHostname, SessionKey, [] | [bigint]], + [UserKey, Timestamp] + >, + 'prepare_id_alias' : ActorMethod< + [PrepareIdAliasRequest], + { 'Ok' : PreparedIdAlias } | + { 'Err' : PrepareIdAliasError } + >, + 'register' : ActorMethod< + [DeviceData, ChallengeResult, [] | [Principal]], + RegisterResponse + >, + 'remove' : ActorMethod<[UserNumber, DeviceKey], undefined>, + 'replace' : ActorMethod<[UserNumber, DeviceKey, DeviceData], undefined>, + 'stats' : ActorMethod<[], InternetIdentityStats>, + 'update' : ActorMethod<[UserNumber, DeviceKey, DeviceData], undefined>, + 'verify_tentative_device' : ActorMethod< + [UserNumber, string], + VerifyTentativeDeviceResponse + >, +} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did.js b/e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did.js new file mode 100644 index 0000000000..10fa633a5c --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/internet_identity/internet_identity.did.js @@ -0,0 +1,506 @@ +export const idlFactory = ({ IDL }) => { + const MetadataMap = IDL.Rec(); + const MetadataMapV2 = IDL.Rec(); + const ArchiveConfig = IDL.Record({ + 'polling_interval_ns' : IDL.Nat64, 
+ 'entries_buffer_limit' : IDL.Nat64, + 'module_hash' : IDL.Vec(IDL.Nat8), + 'entries_fetch_limit' : IDL.Nat16, + }); + const RateLimitConfig = IDL.Record({ + 'max_tokens' : IDL.Nat64, + 'time_per_token_ns' : IDL.Nat64, + }); + const InternetIdentityInit = IDL.Record({ + 'max_num_latest_delegation_origins' : IDL.Opt(IDL.Nat64), + 'assigned_user_number_range' : IDL.Opt(IDL.Tuple(IDL.Nat64, IDL.Nat64)), + 'max_inflight_captchas' : IDL.Opt(IDL.Nat64), + 'archive_config' : IDL.Opt(ArchiveConfig), + 'canister_creation_cycles_cost' : IDL.Opt(IDL.Nat64), + 'register_rate_limit' : IDL.Opt(RateLimitConfig), + }); + const UserNumber = IDL.Nat64; + MetadataMap.fill( + IDL.Vec( + IDL.Tuple( + IDL.Text, + IDL.Variant({ + 'map' : MetadataMap, + 'string' : IDL.Text, + 'bytes' : IDL.Vec(IDL.Nat8), + }), + ) + ) + ); + const DeviceProtection = IDL.Variant({ + 'unprotected' : IDL.Null, + 'protected' : IDL.Null, + }); + const PublicKey = IDL.Vec(IDL.Nat8); + const DeviceKey = PublicKey; + const KeyType = IDL.Variant({ + 'platform' : IDL.Null, + 'seed_phrase' : IDL.Null, + 'cross_platform' : IDL.Null, + 'unknown' : IDL.Null, + 'browser_storage_key' : IDL.Null, + }); + const Purpose = IDL.Variant({ + 'authentication' : IDL.Null, + 'recovery' : IDL.Null, + }); + const CredentialId = IDL.Vec(IDL.Nat8); + const DeviceData = IDL.Record({ + 'alias' : IDL.Text, + 'metadata' : IDL.Opt(MetadataMap), + 'origin' : IDL.Opt(IDL.Text), + 'protection' : DeviceProtection, + 'pubkey' : DeviceKey, + 'key_type' : KeyType, + 'purpose' : Purpose, + 'credential_id' : IDL.Opt(CredentialId), + }); + const Timestamp = IDL.Nat64; + const AddTentativeDeviceResponse = IDL.Variant({ + 'device_registration_mode_off' : IDL.Null, + 'another_device_tentatively_added' : IDL.Null, + 'added_tentatively' : IDL.Record({ + 'verification_code' : IDL.Text, + 'device_registration_timeout' : Timestamp, + }), + }); + const IdentityNumber = IDL.Nat64; + const AuthnMethodProtection = IDL.Variant({ + 'Protected' : IDL.Null, + 
'Unprotected' : IDL.Null, + }); + const AuthnMethodPurpose = IDL.Variant({ + 'Recovery' : IDL.Null, + 'Authentication' : IDL.Null, + }); + const AuthnMethodSecuritySettings = IDL.Record({ + 'protection' : AuthnMethodProtection, + 'purpose' : AuthnMethodPurpose, + }); + MetadataMapV2.fill( + IDL.Vec( + IDL.Tuple( + IDL.Text, + IDL.Variant({ + 'Map' : MetadataMapV2, + 'String' : IDL.Text, + 'Bytes' : IDL.Vec(IDL.Nat8), + }), + ) + ) + ); + const PublicKeyAuthn = IDL.Record({ 'pubkey' : PublicKey }); + const WebAuthn = IDL.Record({ + 'pubkey' : PublicKey, + 'credential_id' : CredentialId, + }); + const AuthnMethod = IDL.Variant({ + 'PubKey' : PublicKeyAuthn, + 'WebAuthn' : WebAuthn, + }); + const AuthnMethodData = IDL.Record({ + 'security_settings' : AuthnMethodSecuritySettings, + 'metadata' : MetadataMapV2, + 'last_authentication' : IDL.Opt(Timestamp), + 'authn_method' : AuthnMethod, + }); + const AuthnMethodAddError = IDL.Variant({ 'InvalidMetadata' : IDL.Text }); + const AuthnMethodConfirmationError = IDL.Variant({ + 'RegistrationModeOff' : IDL.Null, + 'NoAuthnMethodToConfirm' : IDL.Null, + 'WrongCode' : IDL.Record({ 'retries_left' : IDL.Nat8 }), + }); + const AuthnMethodMetadataReplaceError = IDL.Variant({ + 'AuthnMethodNotFound' : IDL.Null, + 'InvalidMetadata' : IDL.Text, + }); + const AuthnMethodConfirmationCode = IDL.Record({ + 'confirmation_code' : IDL.Text, + 'expiration' : Timestamp, + }); + const AuthnMethodRegisterError = IDL.Variant({ + 'RegistrationModeOff' : IDL.Null, + 'RegistrationAlreadyInProgress' : IDL.Null, + 'InvalidMetadata' : IDL.Text, + }); + const AuthnMethodReplaceError = IDL.Variant({ + 'AuthnMethodNotFound' : IDL.Null, + 'InvalidMetadata' : IDL.Text, + }); + const AuthnMethodSecuritySettingsReplaceError = IDL.Variant({ + 'AuthnMethodNotFound' : IDL.Null, + }); + const ChallengeKey = IDL.Text; + const Challenge = IDL.Record({ + 'png_base64' : IDL.Text, + 'challenge_key' : ChallengeKey, + }); + const DeployArchiveResult = IDL.Variant({ + 
'creation_in_progress' : IDL.Null, + 'success' : IDL.Principal, + 'failed' : IDL.Text, + }); + const BufferedArchiveEntry = IDL.Record({ + 'sequence_number' : IDL.Nat64, + 'entry' : IDL.Vec(IDL.Nat8), + 'anchor_number' : UserNumber, + 'timestamp' : Timestamp, + }); + const WebAuthnCredential = IDL.Record({ + 'pubkey' : PublicKey, + 'credential_id' : CredentialId, + }); + const AnchorCredentials = IDL.Record({ + 'recovery_phrases' : IDL.Vec(PublicKey), + 'credentials' : IDL.Vec(WebAuthnCredential), + 'recovery_credentials' : IDL.Vec(WebAuthnCredential), + }); + const DeviceWithUsage = IDL.Record({ + 'alias' : IDL.Text, + 'last_usage' : IDL.Opt(Timestamp), + 'metadata' : IDL.Opt(MetadataMap), + 'origin' : IDL.Opt(IDL.Text), + 'protection' : DeviceProtection, + 'pubkey' : DeviceKey, + 'key_type' : KeyType, + 'purpose' : Purpose, + 'credential_id' : IDL.Opt(CredentialId), + }); + const DeviceRegistrationInfo = IDL.Record({ + 'tentative_device' : IDL.Opt(DeviceData), + 'expiration' : Timestamp, + }); + const IdentityAnchorInfo = IDL.Record({ + 'devices' : IDL.Vec(DeviceWithUsage), + 'device_registration' : IDL.Opt(DeviceRegistrationInfo), + }); + const FrontendHostname = IDL.Text; + const SessionKey = PublicKey; + const Delegation = IDL.Record({ + 'pubkey' : PublicKey, + 'targets' : IDL.Opt(IDL.Vec(IDL.Principal)), + 'expiration' : Timestamp, + }); + const SignedDelegation = IDL.Record({ + 'signature' : IDL.Vec(IDL.Nat8), + 'delegation' : Delegation, + }); + const GetDelegationResponse = IDL.Variant({ + 'no_such_delegation' : IDL.Null, + 'signed_delegation' : SignedDelegation, + }); + const GetIdAliasRequest = IDL.Record({ + 'rp_id_alias_jwt' : IDL.Text, + 'issuer' : FrontendHostname, + 'issuer_id_alias_jwt' : IDL.Text, + 'relying_party' : FrontendHostname, + 'identity_number' : IdentityNumber, + }); + const SignedIdAlias = IDL.Record({ + 'credential_jws' : IDL.Text, + 'id_alias' : IDL.Principal, + 'id_dapp' : IDL.Principal, + }); + const IdAliasCredentials = 
IDL.Record({ + 'rp_id_alias_credential' : SignedIdAlias, + 'issuer_id_alias_credential' : SignedIdAlias, + }); + const GetIdAliasError = IDL.Variant({ + 'InternalCanisterError' : IDL.Text, + 'Unauthorized' : IDL.Principal, + 'NoSuchCredentials' : IDL.Text, + }); + const HeaderField = IDL.Tuple(IDL.Text, IDL.Text); + const HttpRequest = IDL.Record({ + 'url' : IDL.Text, + 'method' : IDL.Text, + 'body' : IDL.Vec(IDL.Nat8), + 'headers' : IDL.Vec(HeaderField), + 'certificate_version' : IDL.Opt(IDL.Nat16), + }); + const Token = IDL.Record({}); + const StreamingCallbackHttpResponse = IDL.Record({ + 'token' : IDL.Opt(Token), + 'body' : IDL.Vec(IDL.Nat8), + }); + const StreamingStrategy = IDL.Variant({ + 'Callback' : IDL.Record({ + 'token' : Token, + 'callback' : IDL.Func( + [Token], + [StreamingCallbackHttpResponse], + ['query'], + ), + }), + }); + const HttpResponse = IDL.Record({ + 'body' : IDL.Vec(IDL.Nat8), + 'headers' : IDL.Vec(HeaderField), + 'upgrade' : IDL.Opt(IDL.Bool), + 'streaming_strategy' : IDL.Opt(StreamingStrategy), + 'status_code' : IDL.Nat16, + }); + const IdentityAuthnInfo = IDL.Record({ + 'authn_methods' : IDL.Vec(AuthnMethod), + 'recovery_authn_methods' : IDL.Vec(AuthnMethod), + }); + const AuthnMethodRegistrationInfo = IDL.Record({ + 'expiration' : Timestamp, + 'authn_method' : IDL.Opt(AuthnMethodData), + }); + const IdentityInfo = IDL.Record({ + 'authn_methods' : IDL.Vec(AuthnMethodData), + 'metadata' : MetadataMapV2, + 'authn_method_registration' : IDL.Opt(AuthnMethodRegistrationInfo), + }); + const IdentityInfoError = IDL.Variant({ + 'InternalCanisterError' : IDL.Text, + 'Unauthorized' : IDL.Principal, + }); + const IdentityMetadataReplaceError = IDL.Variant({ + 'InternalCanisterError' : IDL.Text, + 'Unauthorized' : IDL.Principal, + 'StorageSpaceExceeded' : IDL.Record({ + 'space_required' : IDL.Nat64, + 'space_available' : IDL.Nat64, + }), + }); + const ChallengeResult = IDL.Record({ + 'key' : ChallengeKey, + 'chars' : IDL.Text, + }); + const 
CaptchaResult = ChallengeResult; + const IdentityRegisterError = IDL.Variant({ + 'BadCaptcha' : IDL.Null, + 'CanisterFull' : IDL.Null, + 'InvalidMetadata' : IDL.Text, + }); + const UserKey = PublicKey; + const PrepareIdAliasRequest = IDL.Record({ + 'issuer' : FrontendHostname, + 'relying_party' : FrontendHostname, + 'identity_number' : IdentityNumber, + }); + const PreparedIdAlias = IDL.Record({ + 'rp_id_alias_jwt' : IDL.Text, + 'issuer_id_alias_jwt' : IDL.Text, + 'canister_sig_pk_der' : PublicKey, + }); + const PrepareIdAliasError = IDL.Variant({ + 'InternalCanisterError' : IDL.Text, + 'Unauthorized' : IDL.Principal, + }); + const RegisterResponse = IDL.Variant({ + 'bad_challenge' : IDL.Null, + 'canister_full' : IDL.Null, + 'registered' : IDL.Record({ 'user_number' : UserNumber }), + }); + const ArchiveInfo = IDL.Record({ + 'archive_config' : IDL.Opt(ArchiveConfig), + 'archive_canister' : IDL.Opt(IDL.Principal), + }); + const InternetIdentityStats = IDL.Record({ + 'storage_layout_version' : IDL.Nat8, + 'users_registered' : IDL.Nat64, + 'max_num_latest_delegation_origins' : IDL.Nat64, + 'assigned_user_number_range' : IDL.Tuple(IDL.Nat64, IDL.Nat64), + 'latest_delegation_origins' : IDL.Vec(FrontendHostname), + 'archive_info' : ArchiveInfo, + 'canister_creation_cycles_cost' : IDL.Nat64, + }); + const VerifyTentativeDeviceResponse = IDL.Variant({ + 'device_registration_mode_off' : IDL.Null, + 'verified' : IDL.Null, + 'wrong_code' : IDL.Record({ 'retries_left' : IDL.Nat8 }), + 'no_device_to_verify' : IDL.Null, + }); + return IDL.Service({ + 'acknowledge_entries' : IDL.Func([IDL.Nat64], [], []), + 'add' : IDL.Func([UserNumber, DeviceData], [], []), + 'add_tentative_device' : IDL.Func( + [UserNumber, DeviceData], + [AddTentativeDeviceResponse], + [], + ), + 'authn_method_add' : IDL.Func( + [IdentityNumber, AuthnMethodData], + [IDL.Variant({ 'Ok' : IDL.Null, 'Err' : AuthnMethodAddError })], + [], + ), + 'authn_method_confirm' : IDL.Func( + [IdentityNumber, IDL.Text], + [ 
+ IDL.Variant({ + 'Ok' : IDL.Null, + 'Err' : AuthnMethodConfirmationError, + }), + ], + [], + ), + 'authn_method_metadata_replace' : IDL.Func( + [IdentityNumber, PublicKey, MetadataMapV2], + [ + IDL.Variant({ + 'Ok' : IDL.Null, + 'Err' : AuthnMethodMetadataReplaceError, + }), + ], + [], + ), + 'authn_method_register' : IDL.Func( + [IdentityNumber, AuthnMethodData], + [ + IDL.Variant({ + 'Ok' : AuthnMethodConfirmationCode, + 'Err' : AuthnMethodRegisterError, + }), + ], + [], + ), + 'authn_method_registration_mode_enter' : IDL.Func( + [IdentityNumber], + [ + IDL.Variant({ + 'Ok' : IDL.Record({ 'expiration' : Timestamp }), + 'Err' : IDL.Null, + }), + ], + [], + ), + 'authn_method_registration_mode_exit' : IDL.Func( + [IdentityNumber], + [IDL.Variant({ 'Ok' : IDL.Null, 'Err' : IDL.Null })], + [], + ), + 'authn_method_remove' : IDL.Func( + [IdentityNumber, PublicKey], + [IDL.Variant({ 'Ok' : IDL.Null, 'Err' : IDL.Null })], + [], + ), + 'authn_method_replace' : IDL.Func( + [IdentityNumber, PublicKey, AuthnMethodData], + [IDL.Variant({ 'Ok' : IDL.Null, 'Err' : AuthnMethodReplaceError })], + [], + ), + 'authn_method_security_settings_replace' : IDL.Func( + [IdentityNumber, PublicKey, AuthnMethodSecuritySettings], + [ + IDL.Variant({ + 'Ok' : IDL.Null, + 'Err' : AuthnMethodSecuritySettingsReplaceError, + }), + ], + [], + ), + 'captcha_create' : IDL.Func( + [], + [IDL.Variant({ 'Ok' : Challenge, 'Err' : IDL.Null })], + [], + ), + 'create_challenge' : IDL.Func([], [Challenge], []), + 'deploy_archive' : IDL.Func([IDL.Vec(IDL.Nat8)], [DeployArchiveResult], []), + 'enter_device_registration_mode' : IDL.Func([UserNumber], [Timestamp], []), + 'exit_device_registration_mode' : IDL.Func([UserNumber], [], []), + 'fetch_entries' : IDL.Func([], [IDL.Vec(BufferedArchiveEntry)], []), + 'get_anchor_credentials' : IDL.Func( + [UserNumber], + [AnchorCredentials], + ['query'], + ), + 'get_anchor_info' : IDL.Func([UserNumber], [IdentityAnchorInfo], []), + 'get_delegation' : IDL.Func( + 
[UserNumber, FrontendHostname, SessionKey, Timestamp], + [GetDelegationResponse], + ['query'], + ), + 'get_id_alias' : IDL.Func( + [GetIdAliasRequest], + [IDL.Variant({ 'Ok' : IdAliasCredentials, 'Err' : GetIdAliasError })], + ['query'], + ), + 'get_principal' : IDL.Func( + [UserNumber, FrontendHostname], + [IDL.Principal], + ['query'], + ), + 'http_request' : IDL.Func([HttpRequest], [HttpResponse], ['query']), + 'http_request_update' : IDL.Func([HttpRequest], [HttpResponse], []), + 'identity_authn_info' : IDL.Func( + [IdentityNumber], + [IDL.Variant({ 'Ok' : IdentityAuthnInfo, 'Err' : IDL.Null })], + ['query'], + ), + 'identity_info' : IDL.Func( + [IdentityNumber], + [IDL.Variant({ 'Ok' : IdentityInfo, 'Err' : IdentityInfoError })], + [], + ), + 'identity_metadata_replace' : IDL.Func( + [IdentityNumber, MetadataMapV2], + [ + IDL.Variant({ + 'Ok' : IDL.Null, + 'Err' : IdentityMetadataReplaceError, + }), + ], + [], + ), + 'identity_register' : IDL.Func( + [AuthnMethodData, CaptchaResult, IDL.Opt(IDL.Principal)], + [IDL.Variant({ 'Ok' : IdentityNumber, 'Err' : IdentityRegisterError })], + [], + ), + 'init_salt' : IDL.Func([], [], []), + 'lookup' : IDL.Func([UserNumber], [IDL.Vec(DeviceData)], ['query']), + 'prepare_delegation' : IDL.Func( + [UserNumber, FrontendHostname, SessionKey, IDL.Opt(IDL.Nat64)], + [UserKey, Timestamp], + [], + ), + 'prepare_id_alias' : IDL.Func( + [PrepareIdAliasRequest], + [IDL.Variant({ 'Ok' : PreparedIdAlias, 'Err' : PrepareIdAliasError })], + [], + ), + 'register' : IDL.Func( + [DeviceData, ChallengeResult, IDL.Opt(IDL.Principal)], + [RegisterResponse], + [], + ), + 'remove' : IDL.Func([UserNumber, DeviceKey], [], []), + 'replace' : IDL.Func([UserNumber, DeviceKey, DeviceData], [], []), + 'stats' : IDL.Func([], [InternetIdentityStats], ['query']), + 'update' : IDL.Func([UserNumber, DeviceKey, DeviceData], [], []), + 'verify_tentative_device' : IDL.Func( + [UserNumber, IDL.Text], + [VerifyTentativeDeviceResponse], + [], + ), + }); +}; 
+export const init = ({ IDL }) => { + const ArchiveConfig = IDL.Record({ + 'polling_interval_ns' : IDL.Nat64, + 'entries_buffer_limit' : IDL.Nat64, + 'module_hash' : IDL.Vec(IDL.Nat8), + 'entries_fetch_limit' : IDL.Nat16, + }); + const RateLimitConfig = IDL.Record({ + 'max_tokens' : IDL.Nat64, + 'time_per_token_ns' : IDL.Nat64, + }); + const InternetIdentityInit = IDL.Record({ + 'max_num_latest_delegation_origins' : IDL.Opt(IDL.Nat64), + 'assigned_user_number_range' : IDL.Opt(IDL.Tuple(IDL.Nat64, IDL.Nat64)), + 'max_inflight_captchas' : IDL.Opt(IDL.Nat64), + 'archive_config' : IDL.Opt(ArchiveConfig), + 'canister_creation_cycles_cost' : IDL.Opt(IDL.Nat64), + 'register_rate_limit' : IDL.Opt(RateLimitConfig), + }); + return [IDL.Opt(InternetIdentityInit)]; +}; diff --git a/e2e/assets/wrong_ids/src/declarations/main/index.d.ts b/e2e/assets/wrong_ids/src/declarations/main/index.d.ts new file mode 100644 index 0000000000..19ffd8588a --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/main/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './main.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. 
+ * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const main: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/main/index.js b/e2e/assets/wrong_ids/src/declarations/main/index.js new file mode 100644 index 0000000000..a6a8ee052f --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/main/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./main.did.js"; +export { idlFactory } from "./main.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_MAIN; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." 
+ ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const main = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/main/main.did b/e2e/assets/wrong_ids/src/declarations/main/main.did new file mode 100644 index 0000000000..c284ef2a3e --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/main/main.did @@ -0,0 +1,48 @@ +type ZonBackend = + service { + createItemData: (ItemTransferWithoutOwner) -> (principal, nat); + getRootItem: () -> (opt record { + principal; + nat; + }) query; + /// Affiliates /// + get_trusted_origins: () -> (vec text); + init: () -> (); + removeItem: (principal, nat) -> () oneway; + removeMainOwner: () -> () oneway; + removeUser: (principal) -> () oneway; + setItemData: (principal, nat, ItemDataWithoutOwner) -> () oneway; + setMainOwner: (principal) -> () oneway; + setPostText: (principal, nat, text) -> () oneway; + setRootItem: (principal, nat) -> (); + setUserData: (opt principal, User) -> () oneway; + }; +type User = + record { + description: text; + link: text; + locale: text; + nick: text; + title: text; + }; +type ItemTransferWithoutOwner = + record { + communal: bool; + data: ItemDataWithoutOwner; + }; +type ItemDetails = + variant { + folder; + link: text; + message; + post; + }; +type ItemDataWithoutOwner = + record { + description: text; + details: ItemDetails; + locale: text; + price: float64; + title: text; + }; +service : () -> ZonBackend diff --git a/e2e/assets/wrong_ids/src/declarations/main/main.did.d.ts 
b/e2e/assets/wrong_ids/src/declarations/main/main.did.d.ts new file mode 100644 index 0000000000..5206f18795 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/main/main.did.d.ts @@ -0,0 +1,49 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export interface ItemDataWithoutOwner { + 'title' : string, + 'locale' : string, + 'description' : string, + 'details' : ItemDetails, + 'price' : number, +} +export type ItemDetails = { 'link' : string } | + { 'post' : null } | + { 'message' : null } | + { 'folder' : null }; +export interface ItemTransferWithoutOwner { + 'data' : ItemDataWithoutOwner, + 'communal' : boolean, +} +export interface User { + 'title' : string, + 'link' : string, + 'nick' : string, + 'locale' : string, + 'description' : string, +} +export interface ZonBackend { + 'createItemData' : ActorMethod< + [ItemTransferWithoutOwner], + [Principal, bigint] + >, + 'getRootItem' : ActorMethod<[], [] | [[Principal, bigint]]>, + 'get_trusted_origins' : ActorMethod<[], Array>, + 'init' : ActorMethod<[], undefined>, + 'removeItem' : ActorMethod<[Principal, bigint], undefined>, + 'removeMainOwner' : ActorMethod<[], undefined>, + 'removeUser' : ActorMethod<[Principal], undefined>, + 'setItemData' : ActorMethod< + [Principal, bigint, ItemDataWithoutOwner], + undefined + >, + 'setMainOwner' : ActorMethod<[Principal], undefined>, + 'setPostText' : ActorMethod<[Principal, bigint, string], undefined>, + 'setRootItem' : ActorMethod<[Principal, bigint], undefined>, + 'setUserData' : ActorMethod<[[] | [Principal], User], undefined>, +} +export interface _SERVICE extends ZonBackend {} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/main/main.did.js b/e2e/assets/wrong_ids/src/declarations/main/main.did.js new file mode 100644 index 
0000000000..70a88fced2 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/main/main.did.js @@ -0,0 +1,58 @@ +export const idlFactory = ({ IDL }) => { + const ItemDetails = IDL.Variant({ + 'link' : IDL.Text, + 'post' : IDL.Null, + 'message' : IDL.Null, + 'folder' : IDL.Null, + }); + const ItemDataWithoutOwner = IDL.Record({ + 'title' : IDL.Text, + 'locale' : IDL.Text, + 'description' : IDL.Text, + 'details' : ItemDetails, + 'price' : IDL.Float64, + }); + const ItemTransferWithoutOwner = IDL.Record({ + 'data' : ItemDataWithoutOwner, + 'communal' : IDL.Bool, + }); + const User = IDL.Record({ + 'title' : IDL.Text, + 'link' : IDL.Text, + 'nick' : IDL.Text, + 'locale' : IDL.Text, + 'description' : IDL.Text, + }); + const ZonBackend = IDL.Service({ + 'createItemData' : IDL.Func( + [ItemTransferWithoutOwner], + [IDL.Principal, IDL.Nat], + [], + ), + 'getRootItem' : IDL.Func( + [], + [IDL.Opt(IDL.Tuple(IDL.Principal, IDL.Nat))], + ['query'], + ), + 'get_trusted_origins' : IDL.Func([], [IDL.Vec(IDL.Text)], []), + 'init' : IDL.Func([], [], []), + 'removeItem' : IDL.Func([IDL.Principal, IDL.Nat], [], ['oneway']), + 'removeMainOwner' : IDL.Func([], [], ['oneway']), + 'removeUser' : IDL.Func([IDL.Principal], [], ['oneway']), + 'setItemData' : IDL.Func( + [IDL.Principal, IDL.Nat, ItemDataWithoutOwner], + [], + ['oneway'], + ), + 'setMainOwner' : IDL.Func([IDL.Principal], [], ['oneway']), + 'setPostText' : IDL.Func( + [IDL.Principal, IDL.Nat, IDL.Text], + [], + ['oneway'], + ), + 'setRootItem' : IDL.Func([IDL.Principal, IDL.Nat], [], []), + 'setUserData' : IDL.Func([IDL.Opt(IDL.Principal), User], [], ['oneway']), + }); + return ZonBackend; +}; +export const init = ({ IDL }) => { return []; }; diff --git a/e2e/assets/wrong_ids/src/declarations/order/index.d.ts b/e2e/assets/wrong_ids/src/declarations/order/index.d.ts new file mode 100644 index 0000000000..d2237812a6 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/order/index.d.ts @@ -0,0 +1,50 @@ +import type { + 
ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './order.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. + * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const order: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/order/index.js b/e2e/assets/wrong_ids/src/declarations/order/index.js new file mode 100644 index 0000000000..a7efa0654b --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/order/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } 
from "./order.did.js"; +export { idlFactory } from "./order.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_ORDER; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." + ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const order = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/order/order.did b/e2e/assets/wrong_ids/src/declarations/order/order.did new file mode 100644 index 0000000000..e3ef57933c --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/order/order.did @@ -0,0 +1,34 @@ +type Orders = + service { + addItemToFolder: (record { + principal; + nat; + }, record { + principal; + nat; + }, bool, variant { + beginning; + end; + }) -> (); + getOwners: () -> (vec principal) query; + init: (vec principal) -> (); + /// Insert item into the beginning of the global list. + insertIntoAllTimeStream: (record { + principal; + nat; + }) -> (); + /// Insert item into the beginning of the global list. 
+ removeFromAllTimeStream: (record { + principal; + nat; + }) -> (); + removeItemLinks: (record { + principal; + nat; + }) -> (); + setOwners: (vec principal) -> (); + /// Voting /// + /// `amount == 0` means canceling the vote. + vote: (principal, nat, principal, nat, int, bool) -> (); + }; +service : () -> Orders diff --git a/e2e/assets/wrong_ids/src/declarations/order/order.did.d.ts b/e2e/assets/wrong_ids/src/declarations/order/order.did.d.ts new file mode 100644 index 0000000000..c8f8282db0 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/order/order.did.d.ts @@ -0,0 +1,29 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export interface Orders { + 'addItemToFolder' : ActorMethod< + [ + [Principal, bigint], + [Principal, bigint], + boolean, + { 'end' : null } | + { 'beginning' : null }, + ], + undefined + >, + 'getOwners' : ActorMethod<[], Array>, + 'init' : ActorMethod<[Array], undefined>, + 'insertIntoAllTimeStream' : ActorMethod<[[Principal, bigint]], undefined>, + 'removeFromAllTimeStream' : ActorMethod<[[Principal, bigint]], undefined>, + 'removeItemLinks' : ActorMethod<[[Principal, bigint]], undefined>, + 'setOwners' : ActorMethod<[Array], undefined>, + 'vote' : ActorMethod< + [Principal, bigint, Principal, bigint, bigint, boolean], + undefined + >, +} +export interface _SERVICE extends Orders {} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/order/order.did.js b/e2e/assets/wrong_ids/src/declarations/order/order.did.js new file mode 100644 index 0000000000..de2447abe4 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/order/order.did.js @@ -0,0 +1,35 @@ +export const idlFactory = ({ IDL }) => { + const Orders = IDL.Service({ + 'addItemToFolder' : IDL.Func( + [ + IDL.Tuple(IDL.Principal, IDL.Nat), + 
IDL.Tuple(IDL.Principal, IDL.Nat), + IDL.Bool, + IDL.Variant({ 'end' : IDL.Null, 'beginning' : IDL.Null }), + ], + [], + [], + ), + 'getOwners' : IDL.Func([], [IDL.Vec(IDL.Principal)], ['query']), + 'init' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'insertIntoAllTimeStream' : IDL.Func( + [IDL.Tuple(IDL.Principal, IDL.Nat)], + [], + [], + ), + 'removeFromAllTimeStream' : IDL.Func( + [IDL.Tuple(IDL.Principal, IDL.Nat)], + [], + [], + ), + 'removeItemLinks' : IDL.Func([IDL.Tuple(IDL.Principal, IDL.Nat)], [], []), + 'setOwners' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'vote' : IDL.Func( + [IDL.Principal, IDL.Nat, IDL.Principal, IDL.Nat, IDL.Int, IDL.Bool], + [], + [], + ), + }); + return Orders; +}; +export const init = ({ IDL }) => { return []; }; diff --git a/e2e/assets/wrong_ids/src/declarations/payments/index.d.ts b/e2e/assets/wrong_ids/src/declarations/payments/index.d.ts new file mode 100644 index 0000000000..ba3adb2ab7 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/payments/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './payments.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. 
+ * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const payments: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/payments/index.js b/e2e/assets/wrong_ids/src/declarations/payments/index.js new file mode 100644 index 0000000000..fe780cba98 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/payments/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./payments.did.js"; +export { idlFactory } from "./payments.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_PAYMENTS; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." 
+ ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const payments = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/payments/payments.did b/e2e/assets/wrong_ids/src/declarations/payments/payments.did new file mode 100644 index 0000000000..8e0777bf90 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/payments/payments.did @@ -0,0 +1,21 @@ +type Subaccount = blob; +type Payments = + service { + getBuyerAffiliateShare: () -> (Fraction) query; + getOurDebt: (principal) -> (nat) query; + getOwners: () -> (vec principal) query; + getSalesOwnersShare: () -> (Fraction) query; + getSellerAffiliateShare: () -> (Fraction) query; + getUploadOwnersShare: () -> (Fraction) query; + getUpvotesOwnersShare: () -> (Fraction) query; + init: (vec principal) -> (); + payout: (opt Subaccount) -> () oneway; + setBuyerAffiliateShare: (Fraction) -> () oneway; + setOwners: (vec principal) -> (); + setSalesOwnersShare: (Fraction) -> () oneway; + setSellerAffiliateShare: (Fraction) -> () oneway; + setUploadOwnersShare: (Fraction) -> () oneway; + setUpvotesOwnersShare: (Fraction) -> () oneway; + }; +type Fraction = int; +service : () -> Payments diff --git a/e2e/assets/wrong_ids/src/declarations/payments/payments.did.d.ts b/e2e/assets/wrong_ids/src/declarations/payments/payments.did.d.ts new file mode 100644 index 0000000000..40f7de612f --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/payments/payments.did.d.ts @@ -0,0 +1,26 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod 
} from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export type Fraction = bigint; +export interface Payments { + 'getBuyerAffiliateShare' : ActorMethod<[], Fraction>, + 'getOurDebt' : ActorMethod<[Principal], bigint>, + 'getOwners' : ActorMethod<[], Array>, + 'getSalesOwnersShare' : ActorMethod<[], Fraction>, + 'getSellerAffiliateShare' : ActorMethod<[], Fraction>, + 'getUploadOwnersShare' : ActorMethod<[], Fraction>, + 'getUpvotesOwnersShare' : ActorMethod<[], Fraction>, + 'init' : ActorMethod<[Array], undefined>, + 'payout' : ActorMethod<[[] | [Subaccount]], undefined>, + 'setBuyerAffiliateShare' : ActorMethod<[Fraction], undefined>, + 'setOwners' : ActorMethod<[Array], undefined>, + 'setSalesOwnersShare' : ActorMethod<[Fraction], undefined>, + 'setSellerAffiliateShare' : ActorMethod<[Fraction], undefined>, + 'setUploadOwnersShare' : ActorMethod<[Fraction], undefined>, + 'setUpvotesOwnersShare' : ActorMethod<[Fraction], undefined>, +} +export type Subaccount = Uint8Array | number[]; +export interface _SERVICE extends Payments {} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/payments/payments.did.js b/e2e/assets/wrong_ids/src/declarations/payments/payments.did.js new file mode 100644 index 0000000000..40944afa21 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/payments/payments.did.js @@ -0,0 +1,23 @@ +export const idlFactory = ({ IDL }) => { + const Fraction = IDL.Int; + const Subaccount = IDL.Vec(IDL.Nat8); + const Payments = IDL.Service({ + 'getBuyerAffiliateShare' : IDL.Func([], [Fraction], ['query']), + 'getOurDebt' : IDL.Func([IDL.Principal], [IDL.Nat], ['query']), + 'getOwners' : IDL.Func([], [IDL.Vec(IDL.Principal)], ['query']), + 'getSalesOwnersShare' : IDL.Func([], [Fraction], ['query']), + 'getSellerAffiliateShare' : IDL.Func([], [Fraction], ['query']), + 'getUploadOwnersShare' : IDL.Func([], 
[Fraction], ['query']), + 'getUpvotesOwnersShare' : IDL.Func([], [Fraction], ['query']), + 'init' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'payout' : IDL.Func([IDL.Opt(Subaccount)], [], ['oneway']), + 'setBuyerAffiliateShare' : IDL.Func([Fraction], [], ['oneway']), + 'setOwners' : IDL.Func([IDL.Vec(IDL.Principal)], [], []), + 'setSalesOwnersShare' : IDL.Func([Fraction], [], ['oneway']), + 'setSellerAffiliateShare' : IDL.Func([Fraction], [], ['oneway']), + 'setUploadOwnersShare' : IDL.Func([Fraction], [], ['oneway']), + 'setUpvotesOwnersShare' : IDL.Func([Fraction], [], ['oneway']), + }); + return Payments; +}; +export const init = ({ IDL }) => { return []; }; diff --git a/e2e/assets/wrong_ids/src/declarations/personhood/index.d.ts b/e2e/assets/wrong_ids/src/declarations/personhood/index.d.ts new file mode 100644 index 0000000000..ec7ce9163d --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/personhood/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './personhood.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. + * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. 
Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const personhood: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/personhood/index.js b/e2e/assets/wrong_ids/src/declarations/personhood/index.js new file mode 100644 index 0000000000..93e657e0ef --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/personhood/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./personhood.did.js"; +export { idlFactory } from "./personhood.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_PERSONHOOD; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." + ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. 
Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const personhood = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/personhood/personhood.did b/e2e/assets/wrong_ids/src/declarations/personhood/personhood.did new file mode 100644 index 0000000000..793cd13fb3 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/personhood/personhood.did @@ -0,0 +1,35 @@ +type TransformArgs = + record { + context: blob; + response: HttpResponsePayload; + }; +type HttpResponsePayload = + record { + body: vec nat8; + headers: vec HttpHeader; + status: nat; + }; +type HttpHeader = + record { + name: text; + value: text; + }; +service : { + getEthereumSigningMessage: () -> (record { + message: text; + nonce: text; + }); + removeHTTPHeaders: (TransformArgs) -> (HttpResponsePayload) query; + scoreBySignedEthereumAddress: + (record { + address: text; + nonce: text; + signature: text; + }) -> (text); + submitSignedEthereumAddressForScore: + (record { + address: text; + nonce: text; + signature: text; + }) -> (text); +} diff --git a/e2e/assets/wrong_ids/src/declarations/personhood/personhood.did.d.ts b/e2e/assets/wrong_ids/src/declarations/personhood/personhood.did.d.ts new file mode 100644 index 0000000000..5294a2b961 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/personhood/personhood.did.d.ts @@ -0,0 +1,31 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export interface HttpHeader { 'value' : string, 'name' : string } +export interface HttpResponsePayload { + 'status' : bigint, + 'body' : Uint8Array | number[], + 'headers' : Array, +} +export interface TransformArgs { + 'context' : Uint8Array | 
number[], + 'response' : HttpResponsePayload, +} +export interface _SERVICE { + 'getEthereumSigningMessage' : ActorMethod< + [], + { 'message' : string, 'nonce' : string } + >, + 'removeHTTPHeaders' : ActorMethod<[TransformArgs], HttpResponsePayload>, + 'scoreBySignedEthereumAddress' : ActorMethod< + [{ 'signature' : string, 'address' : string, 'nonce' : string }], + string + >, + 'submitSignedEthereumAddressForScore' : ActorMethod< + [{ 'signature' : string, 'address' : string, 'nonce' : string }], + string + >, +} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/personhood/personhood.did.js b/e2e/assets/wrong_ids/src/declarations/personhood/personhood.did.js new file mode 100644 index 0000000000..ac90b79b0f --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/personhood/personhood.did.js @@ -0,0 +1,47 @@ +export const idlFactory = ({ IDL }) => { + const HttpHeader = IDL.Record({ 'value' : IDL.Text, 'name' : IDL.Text }); + const HttpResponsePayload = IDL.Record({ + 'status' : IDL.Nat, + 'body' : IDL.Vec(IDL.Nat8), + 'headers' : IDL.Vec(HttpHeader), + }); + const TransformArgs = IDL.Record({ + 'context' : IDL.Vec(IDL.Nat8), + 'response' : HttpResponsePayload, + }); + return IDL.Service({ + 'getEthereumSigningMessage' : IDL.Func( + [], + [IDL.Record({ 'message' : IDL.Text, 'nonce' : IDL.Text })], + [], + ), + 'removeHTTPHeaders' : IDL.Func( + [TransformArgs], + [HttpResponsePayload], + ['query'], + ), + 'scoreBySignedEthereumAddress' : IDL.Func( + [ + IDL.Record({ + 'signature' : IDL.Text, + 'address' : IDL.Text, + 'nonce' : IDL.Text, + }), + ], + [IDL.Text], + [], + ), + 'submitSignedEthereumAddressForScore' : IDL.Func( + [ + IDL.Record({ + 'signature' : IDL.Text, + 'address' : IDL.Text, + 'nonce' : IDL.Text, + }), + ], + [IDL.Text], + [], + ), + }); +}; +export const init = ({ IDL }) => { return []; }; diff --git 
a/e2e/assets/wrong_ids/src/declarations/pst/index.d.ts b/e2e/assets/wrong_ids/src/declarations/pst/index.d.ts new file mode 100644 index 0000000000..1bc4fb2956 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/pst/index.d.ts @@ -0,0 +1,50 @@ +import type { + ActorSubclass, + HttpAgentOptions, + ActorConfig, + Agent, +} from "@dfinity/agent"; +import type { Principal } from "@dfinity/principal"; +import type { IDL } from "@dfinity/candid"; + +import { _SERVICE } from './pst.did'; + +export declare const idlFactory: IDL.InterfaceFactory; +export declare const canisterId: string; + +export declare interface CreateActorOptions { + /** + * @see {@link Agent} + */ + agent?: Agent; + /** + * @see {@link HttpAgentOptions} + */ + agentOptions?: HttpAgentOptions; + /** + * @see {@link ActorConfig} + */ + actorOptions?: ActorConfig; +} + +/** + * Intializes an {@link ActorSubclass}, configured with the provided SERVICE interface of a canister. + * @constructs {@link ActorSubClass} + * @param {string | Principal} canisterId - ID of the canister the {@link Actor} will talk to + * @param {CreateActorOptions} options - see {@link CreateActorOptions} + * @param {CreateActorOptions["agent"]} options.agent - a pre-configured agent you'd like to use. 
Supercedes agentOptions + * @param {CreateActorOptions["agentOptions"]} options.agentOptions - options to set up a new agent + * @see {@link HttpAgentOptions} + * @param {CreateActorOptions["actorOptions"]} options.actorOptions - options for the Actor + * @see {@link ActorConfig} + */ +export declare const createActor: ( + canisterId: string | Principal, + options?: CreateActorOptions +) => ActorSubclass<_SERVICE>; + +/** + * Intialized Actor using default settings, ready to talk to a canister using its candid interface + * @constructs {@link ActorSubClass} + */ +export declare const pst: ActorSubclass<_SERVICE>; diff --git a/e2e/assets/wrong_ids/src/declarations/pst/index.js b/e2e/assets/wrong_ids/src/declarations/pst/index.js new file mode 100644 index 0000000000..ac59d38605 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/pst/index.js @@ -0,0 +1,42 @@ +import { Actor, HttpAgent } from "@dfinity/agent"; + +// Imports and re-exports candid interface +import { idlFactory } from "./pst.did.js"; +export { idlFactory } from "./pst.did.js"; + +/* CANISTER_ID is replaced by webpack based on node environment + * Note: canister environment variable will be standardized as + * process.env.CANISTER_ID_ + * beginning in dfx 0.15.0 + */ +export const canisterId = + process.env.CANISTER_ID_PST; + +export const createActor = (canisterId, options = {}) => { + const agent = options.agent || new HttpAgent({ ...options.agentOptions }); + + if (options.agent && options.agentOptions) { + console.warn( + "Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent." + ); + } + + // Fetch root key for certificate validation during development + if (process.env.DFX_NETWORK !== "ic") { + agent.fetchRootKey().catch((err) => { + console.warn( + "Unable to fetch root key. 
Check to ensure that your local replica is running" + ); + console.error(err); + }); + } + + // Creates an actor with using the candid interface and the HttpAgent + return Actor.createActor(idlFactory, { + agent, + canisterId, + ...options.actorOptions, + }); +}; + +export const pst = canisterId ? createActor(canisterId) : undefined; diff --git a/e2e/assets/wrong_ids/src/declarations/pst/pst.did b/e2e/assets/wrong_ids/src/declarations/pst/pst.did new file mode 100644 index 0000000000..2c31c924b0 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/pst/pst.did @@ -0,0 +1,163 @@ +type Value = + variant { + Blob: blob; + Int: int; + Nat: nat; + Text: text; + }; +type TxIndex__1 = nat; +type TxIndex = nat; +type TransferResult = + variant { + Err: TransferError; + Ok: TxIndex; + }; +type TransferError = + variant { + BadBurn: record {min_burn_amount: Balance;}; + BadFee: record {expected_fee: Balance;}; + CreatedInFuture: record {ledger_time: Timestamp;}; + Duplicate: record {duplicate_of: TxIndex;}; + GenericError: record { + error_code: nat; + message: text; + }; + InsufficientFunds: record {balance: Balance;}; + TemporarilyUnavailable; + TooOld; + }; +type TransferArgs = + record { + amount: Balance; + created_at_time: opt nat64; + fee: opt Balance; + from_subaccount: opt Subaccount; + memo: opt blob; + to: Account; + }; +type Transfer = + record { + amount: Balance; + created_at_time: opt nat64; + fee: opt Balance; + from: Account; + memo: opt blob; + to: Account; + }; +type Transaction__1 = + record { + burn: opt Burn; + index: TxIndex; + kind: text; + mint: opt Mint__1; + timestamp: Timestamp; + transfer: opt Transfer; + }; +type TransactionRange = record {transactions: vec Transaction;}; +type Transaction = + record { + burn: opt Burn; + index: TxIndex; + kind: text; + mint: opt Mint__1; + timestamp: Timestamp; + transfer: opt Transfer; + }; +type Timestamp = nat64; +type SupportedStandard = + record { + name: text; + url: text; + }; +type Subaccount = 
blob; +type QueryArchiveFn = func (GetTransactionsRequest__1) -> + (TransactionRange) query; +type PST = + service { + burn: (BurnArgs) -> (TransferResult); + deposit_cycles: () -> (); + get_transaction: (TxIndex__1) -> (opt Transaction__1); + get_transactions: (GetTransactionsRequest) -> + (GetTransactionsResponse) query; + icrc1_balance_of: (Account__1) -> (Balance__1) query; + icrc1_decimals: () -> (nat8) query; + icrc1_fee: () -> (Balance__1) query; + icrc1_metadata: () -> (vec MetaDatum) query; + icrc1_minting_account: () -> (opt Account__1) query; + /// Functions for the ICRC1 token standard + icrc1_name: () -> (text) query; + icrc1_supported_standards: () -> (vec SupportedStandard) query; + icrc1_symbol: () -> (text) query; + icrc1_total_supply: () -> (Balance__1) query; + icrc1_transfer: (TransferArgs) -> (TransferResult); + mint: (Mint) -> (TransferResult); + }; +type Mint__1 = + record { + amount: Balance; + created_at_time: opt nat64; + memo: opt blob; + to: Account; + }; +type Mint = + record { + amount: Balance; + created_at_time: opt nat64; + memo: opt blob; + to: Account; + }; +type MetaDatum = + record { + text; + Value; + }; +type GetTransactionsResponse = + record { + archived_transactions: vec ArchivedTransaction; + first_index: TxIndex; + log_length: nat; + transactions: vec Transaction; + }; +type GetTransactionsRequest__1 = + record { + length: nat; + start: TxIndex; + }; +type GetTransactionsRequest = + record { + length: nat; + start: TxIndex; + }; +type BurnArgs = + record { + amount: Balance; + created_at_time: opt nat64; + from_subaccount: opt Subaccount; + memo: opt blob; + }; +type Burn = + record { + amount: Balance; + created_at_time: opt nat64; + from: Account; + memo: opt blob; + }; +type Balance__1 = nat; +type Balance = nat; +type ArchivedTransaction = + record { + callback: QueryArchiveFn; + length: nat; + start: TxIndex; + }; +type Account__1 = + record { + owner: principal; + subaccount: opt Subaccount; + }; +type Account = + 
record { + owner: principal; + subaccount: opt Subaccount; + }; +service : () -> PST diff --git a/e2e/assets/wrong_ids/src/declarations/pst/pst.did.d.ts b/e2e/assets/wrong_ids/src/declarations/pst/pst.did.d.ts new file mode 100644 index 0000000000..63f1b7807b --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/pst/pst.did.d.ts @@ -0,0 +1,136 @@ +import type { Principal } from '@dfinity/principal'; +import type { ActorMethod } from '@dfinity/agent'; +import type { IDL } from '@dfinity/candid'; + +export interface Account { + 'owner' : Principal, + 'subaccount' : [] | [Subaccount], +} +export interface Account__1 { + 'owner' : Principal, + 'subaccount' : [] | [Subaccount], +} +export interface ArchivedTransaction { + 'callback' : QueryArchiveFn, + 'start' : TxIndex, + 'length' : bigint, +} +export type Balance = bigint; +export type Balance__1 = bigint; +export interface Burn { + 'from' : Account, + 'memo' : [] | [Uint8Array | number[]], + 'created_at_time' : [] | [bigint], + 'amount' : Balance, +} +export interface BurnArgs { + 'memo' : [] | [Uint8Array | number[]], + 'from_subaccount' : [] | [Subaccount], + 'created_at_time' : [] | [bigint], + 'amount' : Balance, +} +export interface GetTransactionsRequest { 'start' : TxIndex, 'length' : bigint } +export interface GetTransactionsRequest__1 { + 'start' : TxIndex, + 'length' : bigint, +} +export interface GetTransactionsResponse { + 'first_index' : TxIndex, + 'log_length' : bigint, + 'transactions' : Array, + 'archived_transactions' : Array, +} +export type MetaDatum = [string, Value]; +export interface Mint { + 'to' : Account, + 'memo' : [] | [Uint8Array | number[]], + 'created_at_time' : [] | [bigint], + 'amount' : Balance, +} +export interface Mint__1 { + 'to' : Account, + 'memo' : [] | [Uint8Array | number[]], + 'created_at_time' : [] | [bigint], + 'amount' : Balance, +} +export interface PST { + 'burn' : ActorMethod<[BurnArgs], TransferResult>, + 'deposit_cycles' : ActorMethod<[], undefined>, + 
'get_transaction' : ActorMethod<[TxIndex__1], [] | [Transaction__1]>, + 'get_transactions' : ActorMethod< + [GetTransactionsRequest], + GetTransactionsResponse + >, + 'icrc1_balance_of' : ActorMethod<[Account__1], Balance__1>, + 'icrc1_decimals' : ActorMethod<[], number>, + 'icrc1_fee' : ActorMethod<[], Balance__1>, + 'icrc1_metadata' : ActorMethod<[], Array>, + 'icrc1_minting_account' : ActorMethod<[], [] | [Account__1]>, + 'icrc1_name' : ActorMethod<[], string>, + 'icrc1_supported_standards' : ActorMethod<[], Array>, + 'icrc1_symbol' : ActorMethod<[], string>, + 'icrc1_total_supply' : ActorMethod<[], Balance__1>, + 'icrc1_transfer' : ActorMethod<[TransferArgs], TransferResult>, + 'mint' : ActorMethod<[Mint], TransferResult>, +} +export type QueryArchiveFn = ActorMethod< + [GetTransactionsRequest__1], + TransactionRange +>; +export type Subaccount = Uint8Array | number[]; +export interface SupportedStandard { 'url' : string, 'name' : string } +export type Timestamp = bigint; +export interface Transaction { + 'burn' : [] | [Burn], + 'kind' : string, + 'mint' : [] | [Mint__1], + 'timestamp' : Timestamp, + 'index' : TxIndex, + 'transfer' : [] | [Transfer], +} +export interface TransactionRange { 'transactions' : Array } +export interface Transaction__1 { + 'burn' : [] | [Burn], + 'kind' : string, + 'mint' : [] | [Mint__1], + 'timestamp' : Timestamp, + 'index' : TxIndex, + 'transfer' : [] | [Transfer], +} +export interface Transfer { + 'to' : Account, + 'fee' : [] | [Balance], + 'from' : Account, + 'memo' : [] | [Uint8Array | number[]], + 'created_at_time' : [] | [bigint], + 'amount' : Balance, +} +export interface TransferArgs { + 'to' : Account, + 'fee' : [] | [Balance], + 'memo' : [] | [Uint8Array | number[]], + 'from_subaccount' : [] | [Subaccount], + 'created_at_time' : [] | [bigint], + 'amount' : Balance, +} +export type TransferError = { + 'GenericError' : { 'message' : string, 'error_code' : bigint } + } | + { 'TemporarilyUnavailable' : null } | + { 'BadBurn' 
: { 'min_burn_amount' : Balance } } | + { 'Duplicate' : { 'duplicate_of' : TxIndex } } | + { 'BadFee' : { 'expected_fee' : Balance } } | + { 'CreatedInFuture' : { 'ledger_time' : Timestamp } } | + { 'TooOld' : null } | + { 'InsufficientFunds' : { 'balance' : Balance } }; +export type TransferResult = { 'Ok' : TxIndex } | + { 'Err' : TransferError }; +export type TxIndex = bigint; +export type TxIndex__1 = bigint; +export type Value = { 'Int' : bigint } | + { 'Nat' : bigint } | + { 'Blob' : Uint8Array | number[] } | + { 'Text' : string }; +export interface _SERVICE extends PST {} +export declare const idlFactory: IDL.InterfaceFactory; +export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[]; diff --git a/e2e/assets/wrong_ids/src/declarations/pst/pst.did.js b/e2e/assets/wrong_ids/src/declarations/pst/pst.did.js new file mode 100644 index 0000000000..1aa3c567c1 --- /dev/null +++ b/e2e/assets/wrong_ids/src/declarations/pst/pst.did.js @@ -0,0 +1,148 @@ +export const idlFactory = ({ IDL }) => { + const Subaccount = IDL.Vec(IDL.Nat8); + const Balance = IDL.Nat; + const BurnArgs = IDL.Record({ + 'memo' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'from_subaccount' : IDL.Opt(Subaccount), + 'created_at_time' : IDL.Opt(IDL.Nat64), + 'amount' : Balance, + }); + const TxIndex = IDL.Nat; + const Timestamp = IDL.Nat64; + const TransferError = IDL.Variant({ + 'GenericError' : IDL.Record({ + 'message' : IDL.Text, + 'error_code' : IDL.Nat, + }), + 'TemporarilyUnavailable' : IDL.Null, + 'BadBurn' : IDL.Record({ 'min_burn_amount' : Balance }), + 'Duplicate' : IDL.Record({ 'duplicate_of' : TxIndex }), + 'BadFee' : IDL.Record({ 'expected_fee' : Balance }), + 'CreatedInFuture' : IDL.Record({ 'ledger_time' : Timestamp }), + 'TooOld' : IDL.Null, + 'InsufficientFunds' : IDL.Record({ 'balance' : Balance }), + }); + const TransferResult = IDL.Variant({ 'Ok' : TxIndex, 'Err' : TransferError }); + const TxIndex__1 = IDL.Nat; + const Account = IDL.Record({ + 'owner' : IDL.Principal, + 
'subaccount' : IDL.Opt(Subaccount), + }); + const Burn = IDL.Record({ + 'from' : Account, + 'memo' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'created_at_time' : IDL.Opt(IDL.Nat64), + 'amount' : Balance, + }); + const Mint__1 = IDL.Record({ + 'to' : Account, + 'memo' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'created_at_time' : IDL.Opt(IDL.Nat64), + 'amount' : Balance, + }); + const Transfer = IDL.Record({ + 'to' : Account, + 'fee' : IDL.Opt(Balance), + 'from' : Account, + 'memo' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'created_at_time' : IDL.Opt(IDL.Nat64), + 'amount' : Balance, + }); + const Transaction__1 = IDL.Record({ + 'burn' : IDL.Opt(Burn), + 'kind' : IDL.Text, + 'mint' : IDL.Opt(Mint__1), + 'timestamp' : Timestamp, + 'index' : TxIndex, + 'transfer' : IDL.Opt(Transfer), + }); + const GetTransactionsRequest = IDL.Record({ + 'start' : TxIndex, + 'length' : IDL.Nat, + }); + const Transaction = IDL.Record({ + 'burn' : IDL.Opt(Burn), + 'kind' : IDL.Text, + 'mint' : IDL.Opt(Mint__1), + 'timestamp' : Timestamp, + 'index' : TxIndex, + 'transfer' : IDL.Opt(Transfer), + }); + const GetTransactionsRequest__1 = IDL.Record({ + 'start' : TxIndex, + 'length' : IDL.Nat, + }); + const TransactionRange = IDL.Record({ + 'transactions' : IDL.Vec(Transaction), + }); + const QueryArchiveFn = IDL.Func( + [GetTransactionsRequest__1], + [TransactionRange], + ['query'], + ); + const ArchivedTransaction = IDL.Record({ + 'callback' : QueryArchiveFn, + 'start' : TxIndex, + 'length' : IDL.Nat, + }); + const GetTransactionsResponse = IDL.Record({ + 'first_index' : TxIndex, + 'log_length' : IDL.Nat, + 'transactions' : IDL.Vec(Transaction), + 'archived_transactions' : IDL.Vec(ArchivedTransaction), + }); + const Account__1 = IDL.Record({ + 'owner' : IDL.Principal, + 'subaccount' : IDL.Opt(Subaccount), + }); + const Balance__1 = IDL.Nat; + const Value = IDL.Variant({ + 'Int' : IDL.Int, + 'Nat' : IDL.Nat, + 'Blob' : IDL.Vec(IDL.Nat8), + 'Text' : IDL.Text, + }); + const MetaDatum = IDL.Tuple(IDL.Text, Value); + const 
SupportedStandard = IDL.Record({ 'url' : IDL.Text, 'name' : IDL.Text }); + const TransferArgs = IDL.Record({ + 'to' : Account, + 'fee' : IDL.Opt(Balance), + 'memo' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'from_subaccount' : IDL.Opt(Subaccount), + 'created_at_time' : IDL.Opt(IDL.Nat64), + 'amount' : Balance, + }); + const Mint = IDL.Record({ + 'to' : Account, + 'memo' : IDL.Opt(IDL.Vec(IDL.Nat8)), + 'created_at_time' : IDL.Opt(IDL.Nat64), + 'amount' : Balance, + }); + const PST = IDL.Service({ + 'burn' : IDL.Func([BurnArgs], [TransferResult], []), + 'deposit_cycles' : IDL.Func([], [], []), + 'get_transaction' : IDL.Func([TxIndex__1], [IDL.Opt(Transaction__1)], []), + 'get_transactions' : IDL.Func( + [GetTransactionsRequest], + [GetTransactionsResponse], + ['query'], + ), + 'icrc1_balance_of' : IDL.Func([Account__1], [Balance__1], ['query']), + 'icrc1_decimals' : IDL.Func([], [IDL.Nat8], ['query']), + 'icrc1_fee' : IDL.Func([], [Balance__1], ['query']), + 'icrc1_metadata' : IDL.Func([], [IDL.Vec(MetaDatum)], ['query']), + 'icrc1_minting_account' : IDL.Func([], [IDL.Opt(Account__1)], ['query']), + 'icrc1_name' : IDL.Func([], [IDL.Text], ['query']), + 'icrc1_supported_standards' : IDL.Func( + [], + [IDL.Vec(SupportedStandard)], + ['query'], + ), + 'icrc1_symbol' : IDL.Func([], [IDL.Text], ['query']), + 'icrc1_total_supply' : IDL.Func([], [Balance__1], ['query']), + 'icrc1_transfer' : IDL.Func([TransferArgs], [TransferResult], []), + 'mint' : IDL.Func([Mint], [TransferResult], []), + }); + return PST; +}; +export const init = ({ IDL }) => { return []; }; diff --git a/e2e/assets/wrong_ids/src/frontend/assets/.ic-assets.json5 b/e2e/assets/wrong_ids/src/frontend/assets/.ic-assets.json5 new file mode 100644 index 0000000000..4515979a2c --- /dev/null +++ b/e2e/assets/wrong_ids/src/frontend/assets/.ic-assets.json5 @@ -0,0 +1,56 @@ +[ + { + "match": "**/*", + "headers": { + // Security: The Content Security Policy (CSP) given below aims at working with many apps rather than providing 
maximal security. + // We recommend tightening the CSP for your specific application. Some recommendations are as follows: + // - Use the CSP Evaluator (https://csp-evaluator.withgoogle.com/) to validate the CSP you define. + // - Follow the “Strict CSP” recommendations (https://csp.withgoogle.com/docs/strict-csp.html). However, note that in the context of the IC, + // nonces cannot be used because the response bodies must be static to work well with HTTP asset certification. + // Thus, we recommend to include script hashes (in combination with strict-dynamic) in the CSP as described + // in https://csp.withgoogle.com/docs/faq.html in section “What if my site is static and I can't add nonces to scripts?”. + // See for example the II CSP (https://github.com/dfinity/internet-identity/blob/main/src/internet_identity/src/http.rs). + // - It is recommended to tighten the connect-src directive. With the current CSP configuration the browser can + // make requests to https://*.icp0.io, hence being able to call any canister via https://icp0.io/api/v2/canister/{canister-ID}. + // This could potentially be used in combination with another vulnerability (e.g. XSS) to exfiltrate private data. + // The developer can configure this policy to only allow requests to their specific canisters, + // e.g: connect-src 'self' https://icp-api.io/api/v2/canister/{my-canister-ID}, where {my-canister-ID} has the following format: aaaaa-aaaaa-aaaaa-aaaaa-aaa + // - It is recommended to configure style-src, style-src-elem and font-src directives with the resources your canister is going to use + // instead of using the wild card (*) option. Normally this will include 'self' but also other third party styles or fonts resources (e.g: https://fonts.googleapis.com or other CDNs) + + // Notes about the CSP below: + // - script-src 'unsafe-eval' is currently required because agent-js uses a WebAssembly module for the validation of bls signatures. 
+ // There is currently no other way to allow execution of WebAssembly modules with CSP. + // See: https://github.com/WebAssembly/content-security-policy/blob/main/proposals/CSP.md. + // - We added img-src data: because data: images are used often. + // - frame-ancestors: none mitigates clickjacking attacks. See https://owasp.org/www-community/attacks/Clickjacking. + "Content-Security-Policy": "default-src 'self' http://localhost:8000 http://*.localhost:8000;frame-src 'self' https://nfid.one;script-src 'self' 'unsafe-eval';connect-src 'self' https://icp0.io https://*.icp0.io http://localhost:8000 http://*.localhost:8000;img-src 'self' data:;style-src * 'unsafe-inline';style-src-elem * 'unsafe-inline';font-src *;object-src 'none';base-uri 'self';frame-ancestors 'none';form-action 'self';upgrade-insecure-requests;", + + // Security: The permissions policy disables all features for security reasons. If your site needs such permissions, activate them. + // To configure permissions go here https://www.permissionspolicy.com/ + "Permissions-Policy": "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=('self'), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=(), clipboard-read=(), clipboard-write=(), gamepad=(), speaker-selection=(), conversion-measurement=(), focus-without-user-activation=(), hid=(), idle-detection=(), interest-cohort=(), serial=(), sync-script=(), trust-token-redemption=(), window-placement=(), vertical-scroll=()", + + // Security: Mitigates clickjacking attacks. + // See: https://owasp.org/www-community/attacks/Clickjacking. 
+ "X-Frame-Options": "DENY", + + // Security: Avoids forwarding referrer information to other origins. + // See: https://owasp.org/www-project-secure-headers/#referrer-policy. + "Referrer-Policy": "same-origin", + + // Security: Tells the user’s browser that it must always use HTTPS with your site. + // See: https://owasp.org/www-project-secure-headers/#http-strict-transport-security + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + + // Security: Prevents the browser from interpreting files as a different MIME type to what is specified in the Content-Type header. + // See: https://owasp.org/www-project-secure-headers/#x-content-type-options + "X-Content-Type-Options": "nosniff", + + // Security: Enables browser features to mitigate some of the XSS attacks. Note that it has to be in mode=block. + // See: https://owasp.org/www-community/attacks/xss/ + "X-XSS-Protection": "1; mode=block" + }, + // redirect all requests from .raw.icp0.io to .icp0.io (this redirection is the default) + "allow_raw_access": false + }, +] diff --git a/e2e/assets/wrong_ids/src/frontend/assets/dfinity.svg b/e2e/assets/wrong_ids/src/frontend/assets/dfinity.svg new file mode 100644 index 0000000000..2442b26022 --- /dev/null +++ b/e2e/assets/wrong_ids/src/frontend/assets/dfinity.svg @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/e2e/assets/wrong_ids/src/frontend/assets/favicon.ico b/e2e/assets/wrong_ids/src/frontend/assets/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..4fe0e85dd7a3ab7bcffd7c3e542ad586b8132d0d GIT binary patch literal 141426 zcmeEtgLfuRuy$aE>e`P8dP!OAcIw7I|lSM#45-UMLn3(=oE~5Ye zX}|i{lH~tn6c7*-bTAN z1D5?8GY~{tOjylx{p!(?L&EGevXb^TIxe(6nC zd%&f>dXJ&{)kAl-S@p-w3Q<_!EOe^-Xk}!t@zsif6M=~i;2a>4@}e$!}DkN zY~5Xxw5W*kQ;oFph>X!YQOA@6UPDSCpM$@48tMte6xl(tv>d0ct8{N8_H$ukjQ_Rkw4!vXMyuP_fKHcUIfz#E}X)^lH3bn?5 
zEnw1b69v`r*Cv6iXVbkiKh5oW9gMDsh_%^*U^Okzk1a7f?xZ8roF5aSg3@wnRRc0) z&vzA5U%72H1o~D|;np|&4Z~t$alC4Wjq3DCA(W>c5Nab10hM|_SAr=6y5AlNK7v`P z+jtvli%q|KXlpTo0A6O``1#Ex%8^k8s}@T#aN3^bADL^%Fu@8O!&RauJm%G*n5p{2 ze>>K|c}fp)Qb-(&#RA)W5fPMP&0{}nQ<-t(Z^_rqxm~6te6K7j)VvNPX+049nCoY$ zx3cQinDz5nw5RbJrA6g0$K`=XV}{h0Gw-~B3Afj>2c!_Ws8q0(Lx;J)WNCa3tCPnX zZA3`W2kWeWnv_sC2v&BC0!au&$DI^Kb;_f{h6);fcf+B(YPsn&j(ICAK?{97nD+&<@qDM!xV^`lGB9Q~{uJpF&A%*h20YWHEfZI`{cj0-QExau%w z;aiooy?CW)L(?U&jw>@gevVwOX9GPy7033F{S^*tkU=z?=ZcI}upZk?K^c*uvI^HzB|fmUcXy))ngcKdXFXfj(7JzsLi#mX|~q8++Mi{bqG(>Y&% z3*tWk35^%J?VvjFXClpgCNbOgrkCvtWbL)#2lex1ee8;D?hh1Jmf}yy0&Ez^x^MdH^=di-B7D0hN zNLP)(cCfkY$Hl+)(63i_J;9dZWJ5yIe7a)9UkKU)ulQ!KgN!I$z~HsiZf4h z;>*>dwt?Ms(lnsf>G!G_=BTj<39sa(ePlCPl(XA*6)hOW@ZP;moTF(q$g7S;AQTbc zJ)50T{*$HSYY*MBkEpq5rbwlH@$b})MLi4M&vxU}UL&R;_{j=Kt7K5^5$5dVjVbfV zZG=ZJ+nVEhZh{m~rQgDbo)AxtO7*c=9$W+CK~*!q1VmLX$3vk%OI;n<6;7dSx3-4S?D#m`=-Pl4OSdEo!6AyKKlVQ6Rh7Qr--digA%U= z2c-8g>m_<_hZU+Nws1lQ3vdgxoXQ|`$EhKO1%bVB#J+_Dh7XE^+h2f$fuQW+u?sOY zf^ldu6@dspD85LU%B8BR$P_XFZ*eLm)|13`*?Wbm_<{7ee;6NsNSQptBe_JQbAPxE zKVzrUBEMn6p z16~7w_tgMNF`(k1fA_UDymC_ZCUi6k>;8APh8!1gUU4Y&30#eAbYbS*gfA#es&e{7 ziE)k$idO`4iA-`FzMb;;g!@mKtNHC=^(!?8Go<;KIlNkst7NN(@pGHsM&?d=BW3D< zg-{RRoGsy~L^6`^i1z5R+>qY;uke~4#W?}$WG5qaEi%}(gkMaP{k9uQP9%Y2Kq0}` z?#baxNdTGmA9yWi9!+7g(_s60_S4BSCUMe~LvL*RWH7lJAC~e$DkCiD@!xP(FORu#QRYRa0x8ct*K$16efd=`yHIJ z0mqYu`sc!jgW&q|g5KB_{v?IgCR#xcUrs)~)Y5>$EdJLC@tO|nw8RXAl^t$-3Ep}h zs9Ed$n^jyL@_nsuk`c}?#eddBj@WX6=K6_810E9Jt>&I_6WEnZCb^7Wt~U_aCL$3D zPkDPKsWq%tHk>t!_o=nwzd&(?EsUUkFP9(92`c@z3qBnT;IB4~fP&736qE^Dg zIr{nraV`ZtUNsj(hvNmf0gDLV z*ew2l5y#nZ=W`Tjyyt|e$=wQT(STtkVE#~Dbuc{O{Z5ALsCb5T{kHq6lpEXHZl9*L zjvE{zrL>blT?w=`YK-|cqy7kj^HVxz_Og!Jq|{g_^Vi-DXn}hVZ(zNzv?JEZD*a4y zqvAK{T)3cp_;MFY=tBM~1$r0MLg0dthC6 z!6H5-@dgik4g09=w0+3^Vo6SoX`0EXqF-Ah0GX@3MOa3xbWtL|e9RR&cI_K|I1K;0D@k5z0!)G2%O>9bgZ!UtECF6ZE6LBhQ z$`OxFF{GZiY=B2s&or3GJ4}a9BENSv)1}(VI(@iQ3v6(@MnSs0Y&^a`AlPN%AmyO@8ns3r}qiR$xwVuN`{^y{O`Al7lDQb`otw)UTzJrFwoc_ 
z%~`NCE^;u@R?Vr1Rwr~yCf41xbIGpPSj)aFX`r0z+;%ovF@}<6k%}*ZItg1SM>9*w z=YZCe+RB=x3KV6wGI6qIeRt;f*q+oK#F?+z`o?DpSUfrWfHJ8#>uq;F3HVZqpye)xg()OxCi_JBAa573x1k zhc6icG$XB__XeDh?6xuvVq&SVkj%2=B`0DWweFpgakU@NCLTK)o-kohvLWzVKNYN* zK*>VvZ|k8c+{s}TT7vhA=;mFRwLQLroUO__PBORyUB9x_`YiMOwlsr!w(bYj@J0tB z9N}cHD|Gq@%gwe0EowEitATZZkTN*UKU(an`D^%(URiKL{s?MmVTm-Rru195z62*Y zJ`d=pA=bhx-Nd~=4)(?n3xxO5XAXr_C(g=~n@zHphryUHI94UD<>D!NDFNXfN4J>) zhL;03oAUY>GBN98KS@+0-M4IOvjw}a(gqzq-OwTvO5Nj4W0^w5KpnHb*~>1R6y4v~ z1=hnz^t=?s!?d!T0XvPVoWoThkp)~k>p++qmOeQ)-@v~m)rd2HLq;}|i6X+-NDF3X z?FYhbzN`98oyx>rV&GBDs-JcKY~m&pu#VkVAtIYcP}BZ(ABrPkNP>hkjx7; zkAJq^ppnDzr0gi`7oj6~ufW~;^rK~QvLUI-!qRA6!#`44zR|)izfkuPO+8^C8|Zr# z9|0DmIw{95dS(H^7NV7aH69nZ`DdD5{fDrid9KC}R(czG;olOJSt+ez)mr@wX1V=% z$#s`zIJx;hM94<^%GCBIYoLt7hkNJkUNV*ETlw+x=tDSy(zH7E`H-L1BV}gSpyp+A zWKFF4vunK>gnN!30jH3bOuy#NmjZEos9Z~`?O})FHB=#g4vCVeS6-1DPRU(PeK(F? z1<*Mlv@B%$Te4T3+sS{UKXvbx-cRqCitv#^qu**@r5aKR^Eg2EJ$HazVzroc& zU-Lt78W&t_G_ybzB-}<{YC`C!LgZGn4d4w)P#=G>9HnkE@b;bW0b`mhe5Y}o@*K*p zsUuHZZZJ;fwM0xB%#nYI~RHO%ZpVNM;e=phS%*{9PXgL*&pJ0RLz(L z7aERfd?Y1w;cyP5s*zwrF-GBbsS7vryP}Mwif7AxZtK+;i!}xgn&zBgz^zOe|CU7je!Nn&vfO;toghx z*UF#(uutJ+$iaSa1G=!5wIi7JUC2)UNl;G95`n)^JSg8RvcL9A;ID$;;Wx**O0(k8 z?Z%O`lbp|k*?mxgf?%<(j+JcAFKuaNanzmdK-J|3vQZ`93>UUIv!gqMlt`pUz82Ly__r=pUB3_R50!w#zhlGP^>LqeI8JCn+ zt^$WusyYbTUiA$K_kDC$gb>>V$qrPcvu4Xd4{~eO4_tG8gF08t;7Z~t_^NUU!{Qv^ z3ug%#HCTSP)J&804lLj0rn9P51lL$fI|JwIqGiRt<&Fu68s$Rp5a+I83@~ zg90y-CLVrHW)1*uqhH1KOc6zN9uqwjaSAe@{U;Py_f7@M5g<_NqBQ}}$wgpyb_?@P zYNnZvarCE3_`XqVQ0Z>B0%U?!7bJM2vn~wUPpvLq)31cT(fPMg5FzXzL8%Z@O(ifC zaLI@r9W@$95ys?#iDTY^TTm3(p@6lGF}57xisELPeS+Fd@!>8)gpGRcG_U)^F0rU= zyS`<>1_Nlw%G7D1A%{;)5G0tN_GQ58E15&Y)35jWn!gFpp9ZQ7S%4}{{=FjlTSP5& zfR{LcL2KbQMy|vqHFB6aQ@?Pe+tF=rc`m|==eH@ta^xrSc=;g2m1r7i#J*g$&2RBF zl_Iu5fLT?N)n(N0CxrRlMfFH<7+en1Q2X7S<&>l4vrCo+^<>a)vxA?+uNv=#Y62`e zJfTTPocli*1u7@Nw7=C-0(4n|=GGK2W_#l7m1?%TcU5;SglSBVZ zs5^Mt4ksh1H8_mP_j$;0|D$>M*+UlY(XDs+S&QD~{`?km?v6q&gC<~WIr62csT2<= 
zkH#w12{8_9R}*LSHWwd}J1|-gB4--(UFLaS!FZnbh$gy01%P zW1yErTM+>KDJ3qgEpd5MTK4r;JaIYCm2?!$Ku88^6)m{b#?(M0yQ#p&2kmKso=2Ys zYiSNnJN!DjGo-Ur{Dw+^79`E$ z11+#&%ROPr5^|ySh+C7~OQrxkUOzx!SZpuOG!yuLhYJ3%oOyHS_P><}euU&=zZV8j zScF?aY|=acW<^)XY^sRnQToQR>mWWX7fr#vuW}*IgmfSBFg|ZZ#azS1N!+3ez+~FS zv=Lm5yS&W9%2Ph3n6BRf2W>c79b7yOXk{4Z%(J2nz!{U*f8XY<=KwEJa>U=551ZAf zt}GIol%ONq_eg<9jR9Jz@$Uq9Ryu?m_LyMFRfaJ&El_G#!v-_%)QSRX@{z8yX_Zv1quCf*BnP(A>#?g=Gt+z<(tj{ zZGZ`vZuMz__i(XPR_ULC%07AtE5AldG%4td<8VlHW>dBr%@YpGEXQ%8I7b|}1akUuaMTlg%mem8?cB8m!V{kDov2{K05VAYLt5MWoq z1mgorDC*@ItALyK_|h;!5>4vYBsI>wfX>BxZO5725m5?#I3B47BTe3LnfFZr%92qd z2)aX1Q|YzVH1eVj(<)q#HqMK65mL6)oa$1 zw)!O#I>}Qh6%c_L*ohB>7lqyvvihw`mxuj0dTw|E%4K*7UV>4tw%k^zq%>pZHQXA( ze~bPbn&mPah0W(wOdL7^8zEGkv0TYANU3n9@{_g)n$ya|7E8-bs+=41Qi2;}G5N^6 z9(S~%P(3uy?rvwlwGNlb?0j_A>*({OB|j7gVlZ2Am>U%y@*fg9jhyoF+duc+ zOtYl|gF$B<`bo6fA`%c)wGJ2w&VD)`R~I`hksMuXG)%u8PB6z9rNX5-l}&{H8$mrH z&$uy2)=#&`M%QGrpxGFCHgwq^up3sR=ck=1pRJfZz@DE|{fe%k68h6769b!yl^ab? zax?ibpE08e0w5y|iH`O#-PYbQ$(|FcydYzqu3l~;fyh=FA+Wy&;H;e+jGDKb-sh0`?CvB$V3t$DKUf5PWt zyy|i8J`6^rBX4WlcC4g}|AfDQ=Mb|}Vr?D+S6r60W$9;z`HhL24|@?jLB5X$hput7 z$GV5CbyJWjsv!QYu^P2!qRTLiqfJp|jlP|Dn^I{VMhyU7F#vv%k&JAtMFxhz*6Ls) z#hR8PxwT|FaaSvTa>@M^9)emEH}iVTjkhfCHi2KNKaq=8Nv_=+D*e0J_Gf{l7O^ib zJ4Eo%_5@K?r2Uj#C9lk(4%=(*cR)#@U_~E*N;EwdpN;0VeIkI*#S@ewBp!=!d;(qDqpwkF2kie|Q3u(D#w=97W`x{$7$+=bUOu>%g}3 z3TvFy<+B+LUibF>V|nB<6)racS+D zqI|4{i2@AH7Buf;(yL&==HSKAz1uJ$nbvC#Vj_S`ow@TeHIpp@EYIs?wtv+HI5>?=uCo)?Nl%s;GA6YmiJ z&DsFnZC67&P0&vzTea|XDTa8llX~$xQqQAt)?G16DXpdz=`1tf_8k1}YLWQ6*O4VE z1OOTYL7j?{m1V^RT_kN{{p|@MzvsraB25Be+=!FqrS+O3KBCP!yY020Y>COqHR;HXn9KA5oBfO#iU3?35co z=pzgSgdpu>wucj@%VW3L@iNjMfry6a67SePU7g;61?Go%DIup2Z`DCd%|ddp-NgGW z4)+PxZwUu;bSd`nO+#W~foKO9c-6>h4SV;-jx&4s(W1OvzD4K;whkGT?SO{tx4wmA zT^neN_HDUo)28vFfzeyK+#o)yqcbNnSg*Bq%X#Sik_eM}U%qj)7p&_T7VELz`I$F8 zX>3}@rDFMQ;QB|3>ej6ZxpTCwZGW&N);ock8WxXHW05}BX9EpGH?A_s8!b#Y=X*=r zBw_p?16e1m6=Wk$HXB8}+h=!)&IOP&{L`Mx^R~#K#UM||A8vbbxnaU+nVJR80TN$|0=VTJhFk4L@j{-vGU 
zYMwK1L+&zXBXf3nN;*aO*Lf(BPf?Y5bN(Bwe4;O%W+6-NmZ}Si9`V==!*_;GtpAE4 zi@SalddV-p&S)d$D%`v#ZZc(&sRN3jrXmDoR@+&N)n@d%0;JFMY^4tz?t>7_uB3dd z?v5(*;JGV(OLCJNuOrIJc~g?9g?z2eumuBA?5G}!KZNC0LForJtP$%|w{wjtZlDA~ zVGG`+?ijh?mho|5>w^&+Paocvcev=kFqgGSWRlgbBwb`1E;=Jd!?%~)g9nDopO}&f zb}*$54|Law;^#Rx(SzF*-Nd!~(5@C$!+s>|A*+joOvXCmkiZYpmRV^>Ui}&D3LO~! z>(d=PMn-HuhOk=mO6Rto$H*tH>{7Ahz+DQQzxa&-^IE5OgX|9+)l?Z!L+0+j+JQgb zrE(p&6$X_>C%oDZK*=J55noG8UGm4u#h4aVfk2%^fVo8>rBJ_%;1x-^vdvPY(e9q8Foe?IR&yVD5;i+&wgleeP_3#tA-0D${i$5WZg8BVP04LhD=mfbYi4eH! zSuF=Cz6JR=5I5whbYw(7O(Xf|$?KK`hZuqa^b9>oP6!xp1lnCv2Ax|0HN3#3TL#t; z*zi(i)@v%$_daNMt2UrZ;YAGEKbHQl!}KF{NjYO~OX(RYAkg|2#J^fh~uG zFx5H+3-7?z8&^H)K=-S9bdPM3cI+`hS7fE|NL9O|?hKMN%c%g*4_TMy)(Ui+Ak&3D z6jLS})6JcJ7_=8&RU|=#%yQQ|1wP2e)YXxfS+kW=7<3jt6|G zQ!07FOTgQV9wFE2tSr=7v@A3#g7EO=JOI#e#VNVHnC5_jfT)ug#-$4yDqYokb4$F6 zf8sJ!&XJ5dJtm567c&%yX`k?X39v(<=b`9GOGUO%68=hc2Eo|?%6k~e__;zVG1td$ z(uD84TPka^ll`O$A|Z3lyNjuwFn`9je&!a5yNLS%K@}3-t|1HL-Vm<&yhe`Pe7z1` zk7aCpS6meYG3LV0Q-J3nK1zYKQH$(bHde}H_Di{uFb+S#_%VyiTRCs2UWQl0GE==v z=&6ALslvpYs?Bs55j$B316@bYqa(p~KFXQvrkf=B)%xlqgf50VHGsWiAOv1jf6gDUk z|0h0ggt0s$Oa7upE@V=KI;MIkabvdo?W+lM)M%eA|M=Z=fB_QYt3`c>gDgcJOcyNd zi`*-(t8W9kE3HH!Pl};XmJ9*}1p)&_5R_}knE~m3q8+;wOph%<_Ndtd<2^L9iAEp2 z0(2{n1oyPC2^9HVBJ&`(TPm*S^;tYF++euQx0{O$NgezL&(t5+!AL9gjCCaWyE0zV zJDe!FMWpsnGEJFjdIi#Z1#sldqm(W2PEOGN;wZg7vXS%$+i>5Zwei44(iLfO8|X4M zNkxNI$X_v&WCfYeK z#Q@|FF;smOqjTjQ$b4s*@X+zgY@)@&zQZ0S=&EQtgtuT;jG|8e9Kt#}-!_;T?Pv$d z#@B@1-X8<@J1UO={L(WOvBDP^(ocg3W5M4M>Awkfz5v{v`;L*D@5)#`&lC2r@AZG# zGZmxmd$nU9rzesL+m7Jl6xI21ee)lfn_cC}XA(ozQx$5P!$ei~6`+6UQjTD)&WP1W z9bHiB;Y|XIXB!VKG{`7z8TLX#A*f<;Ei;`DK#{Msj;gUwA?kRy$66Xto8V%DK%|lt zZBJS|5U?di;yXo=dY;jNo&$=}0+-eRWUmT3WdGpP*mw}*jKelntbJ)^BKW>v62fL$ z$dj9Zb<~`P#+$g6!8pl(Fe|{@+--HlXXtVKn55+--V3D+e@HnFgjsxX2ffm=z*~Iq z8q-kf-|zS60%U)%IA4HU$zd)-UX^J9u^6 z@kU&HMUywme)Zj1DVYTDw|4`7RK81ZAMGM<wtfQ6x^a`G_n7ZsbID zeToA;hdd%SeJt;Z6fTaKA&}KTp-FTc^q|d-BJ$KD?5M-o8}PbBL7$b@AhNy<+-u9o#mG{? 
zA;+g9)HK9>RNt6pBdv#EpsU!>N|Zz~0BCv*4PGcp=bzXzR7*=nSsOyW;fy1NMR&gk z@2nn4crx_btCh;v02An6#K5P{8%+Hg#q~K>+PVoh#B_z}f-v!*6WeyvA|@uK&J!;0{;U0Et9C`>n+Q6DiIKA)60~>`7WmBl( z^qFJPx}azP2+ZnynH2Ls#|1u}eJdSadM7HUgY$ra;z@bHS=uOt`^Vy@3&$%l5PNp% zy)WIMwYjGyNAAR&+b;)l>rsdVVPBQd5R|!RFFKLYopW<)@&4XnApxsrMv)N^J$c(l z_B@wo`ny;G33)8|YYX73jb&o=bG@0bR{9uex9yx%Ah)rF92?VO-B9rpIjGKfObsi0 zSpxYQT3Z&QDhh!?Zl#_A;!%6~JV`P)88j_~3Vxm#)i44wq>)P!Nm+z^M+-yu$KXy4 zgsc?!i~~i^8$0mH@@wKMR3DT=45s%|%Tcq|;b5iRHSY{RDk6iGyYqkocoW?%b6E3S z^J()&TW-tHmy;yj%)@52$uA8Ku>r_;H;OpZvV3JCQB-Q@KL?x(Z_Hala4{tt1uFFI zhAi)j^|N#Qs)mL-0>H45CvmIK6fW<$)FjqHx}u#~NbbTX{fbhn>Xk#(R=|!#BC-do zeuh%iGathbV;Yf)ud93A(tqSCD2q zrq?gCA!LE->^o2vT>@4~A5DP;myTB)rKHXFX7?*?mK(nbssHy_IZ=1-)=$nnHDMT(P$&aZgFU~E{%8rUWV59qilStcl@@MiSkrXqPm+eT#nOt= z!OZQT{k^_{hHctDNvOiidgy|gS8H|8YK^TVA?|`uBUMSIB$qkli_-?Lb5`w~x0S<7 z*6+C|8|pesHoN|&B-TyiBc+nuB+kC4zdYRs#X#RN&WKGv{O<^18V7p<%E86r-wdA7 z1w`UXG{X2?hw|6174>$tFHz`2R4MI`wnX$fPhOzils`Bgg3;iIseijy4 zSTNb^1Z+ez>+k+W9TDJ$IpI^**=tVhA}!Za4mV(l-@-!=x5k(fiWRT zEPP(R*?%fY&=JfYi~wELTExdf{;}-27)o=2%hcpfOpYhOvOfA2CPlh4xqsV{>KLaB6k?nxCq()vG-F__~H?VIVy)XJ!n-;iw7fYq> z@^gmnByC#{aQtK99fQ7?oot4YlExNPw;cYZb?tYzqqXw9A@j}hdNKZGI@Wg^t zSW04*OmTfGwqMpj6%%fk%xNKA!SaNB#h@hkqLGpCAaU?(ErZt_##X13Fw3MDLa(WZ z)qX-+-Kz|!lwUV$YjFi z&Wxkcg(AkZeb@TWl(7OS*i!TMwy_GICWGb19L33B^Y-PIAK{%YAhzOY(R4E}8g2L1 zJ3SI#EKt@W{Ks!&1}ywk_9P&hr2;AGe0v4}h$A5W;&xY=aR_6w30l)*QlVI$w; zJF}mfjO2#3Ayj}5;CQXaMvX7tt_7I)`krkKEo`+b8a8bj{I+D{R3HGaI~ z3#(u9`ade7QZx%Am3|Akn$32OQon%yqnfz#P{IC~fTg`!&psq&(k1s(awO+7m8=Q#P%B1{mjz!Czy-SsKc%5h| zYROuSwvuY>ab*EInesl3A*$_pKzezQD_S#0H&N1O?cJ?PIB!)n@EzzLUcT;-Ralam z23Ii5M>eJ1`bywTZj#~)0Ymf z3s-_?#1b>nK+T?3*Ms>8VH54xO)UQ56xl=UJc;HXIQb&vVG zEd%}FP z(%zwL$&ya-bF)J23mleGezIFwHr?*O!W@v6LKzkwPu`r(lZFW|+=SlGZGDdx z5ICj)?kuWr|?|#Mm642qo<)Wpqez4?ksLLbsOS0qFyD0S*f{C2ISNGS%Ey zBNh2wqb#nIKQhbJn6~4Z=MjbhMTkw&2dy~70nZ1Hw@FCnJT{3MHMUtAl|+#7b$+0fs!F>nO8)@} z0~w2h9BuNkHY&xOP^a4`A_fz=J1litnJ*rj?KF~p9xTG*E>&cKKfDvM3aBgU8HexD 
zA(0a*YQhB(m1XW<(*{0kc7Gd-yv!FCrbs9VT_@FUer{N~CAnq#WyY_Ig4UjTA(yba z2sUms(vb@o(9|{0YbXXLuIS|w>Sm|-z#4E^(O>X362e;Nx_RMhvs{X3IklBI3pR8p z%=T?H0M(M{21utOc-I6rQZ3@`A%Nll-$?HA{q3-+7GawusmxhUd`NI21(S~^IpagP(96G-XFjdI1H0$F#b&|?&XcSWxJ=SLNdzAPY(`8lV^(s&+EbGMv}5X)tHKF zpi`0}4@u-EtBQ;nyUlvkbT%G54Cp z=9u&D86ggPx+wJ=c5m`C&5%({aed#p+uk_3y>~==-^DLXOTkIYMAkmLZc^YGr@p#n zP8v3e?-i~c1j#h1ph1hk28S-NFkpw8O9P?dC0aLx6eOX+N^EyMU}-5%*H1#|uZjFq z;Nr)QLoQ7fTqJv7IGrIsF8p@&dRp}7nP(`ctiLXjLy*7Df%z7WIPC}-Bp^A>lnOs^6H z*;BOQQJpDM-VR~kuAk}lMxoc(3LR$fJ@4g4YB<{$SnMulL8wA|u&mWxsP0f%d@@NF zKhpgU?&DEdLiRtt`T5To$*TI8-+5GIKQ`Ik&ZmG!kl!MY2ZpmwVUp%|NdfMn-*id- zd2kK_VEcD_TIVgK>G7Si|1cTv`sc4}_YyxI!*!zTRow~)Q~zPLbEQ!1Ev=VA4XoI@ zQ(Ie`i?j>oJCsWrz?l0SCnx=`1+_q0ttki zlq$)z7;`ugqXVQM6SEWGF_Mja%CGQ3pM33E!ECqV)cJKQI7()O@J;zly$ALdHq-*G z@opWhdsG*BnfgJz!xhNt%46La%D;r+lVejkAb>W==qNX8R<7$}Vl_SDH6O3JSbmG8 zA_qe8k|Z;ZWb`Hp;W1}>9}%N52;~Lfny}dY;oA)1hF?0|-8UX$zAoBa+ZZa*-BoR| zb%WT}w^_=^GTe|lNrYYnH_0&=n=S0s^4l+lC8IP=6b2_5s){m}p!bp_FlL7V%~N>A zoywJox)zl1_;`F#yy$d81shG$cIZ#xXH7UW_Z((<3OsNwsy(+Kc@f?H6*MKVj*3X; zi-$=`iq0zcRRfe2)!wTHn*rGj6o`#&gLM#U5QK=i(R!%K5yXbsAB}+tXO^v0-dq1( zT4R9Pqp2Xe&fXJmojmhOrq0VL#PFSJx$I=n-XuFKddGzy@QNZp|FT68hmMY$sS1rc zrvh~BtxjvuNC}A!Il}~+>X2Ip+wMAMn|vtw<>4~ zJfDaOUN^$N8pR(6#@QDqrJK~3;xWtzg2A{=W?LIi^0!XxrS)iBa%gSaE)nTD-=*U? 
z9S;58Z*}X_+y|)|x)??;hz{l82|QTei0p25X-)YGd0F;?M!xUs(C7Jz5P8$$q@X~9 z*kWs9lH}0xvDrv`#?%??-b<1MOjA1zXyF5qqfjjLQ%A!Ax)KEb`sP<-chh(@VD^I$}Q72f5=?RfSRxQdkjXupazU8l5z+us=gkfBFn zDTbHof+HKo#IW8iiR<4+a49{7w3}97yfFHBSzL%pnu75`4hNaUqH%8nT$mLrW}F>X zPC4w5=$L#TVj(k>Tv}yH3%5X}vT1(b%~jxUr)#2;zJKgw$b1XSl=Kj8_nbbsY3RLj zvWO_SEG>*x3@b}BD(;~$>+n)qjSf+f!Vr8wfnCIpbvCJ=TAQvm%Hm|HyN2+@OC6eFe0N1?s*gM4%v#qlRV(YtN29=Y6v5g z{|ojuzpA1@1$zrN7y4lmB=fOj)xcQlVqp=J|+@&QX z$vU<;oXcGVaFn1#umre2K|)x4vXY-(*YR;8gVPq7R=d7XQkwlNO`>oimUCnVIxRjE zqB$g%j4!xznSwc9L&Sq=aU$-E<0LvWG?Wz8;b~Wf?kDfdoKKx6DR-Tq?Y$IxZaS#t zut$ugcTY7%7csb%WeNB2QED#&pcV6^)q6T}!eWdA5b`fFKO*OirSgY~EG-WbOlc#y zbTYQ9gJv`2+(f~++Z(^S=II$o^#lR()+CfIc@nBQ_(!mR^~9V z3M2%klg_0?l?W9$5Ev+Y^qxv*kR6OCqGeKQeTk7X(oa;2%c zv@mYUP+}oZSdZ(*m{TNqJLX=S$`+`9#oHPvUQ{|;&Dm+q&qgO|sj1q%-TM*t5)3n?g; zTwvO^skrV1e6K7HQQ70R)#E**Xhs#vU}#gDhTIOgSa}-2n*$ znYZl(sqJar5!L6}F*eTos4IbNCfW8JdZKts0<2H}Dl_ZvNQ)p}bV71r{vik+@p9BV z)QgaM&U|)VPQVf1zGYq6HnB!uO*|Qe*ma9udM;;11sG zwKIUh`+$D0$QwLS&h9ns=Zpa+`yE3tgB*H2jetz&u?qk=6MlG`*hA`mQS0!0fz)d= z6`?nqk9<#~M4w#(w}CxTHpIX-)t|!#ddfxLViLaVi9D2AdVg$O)yA7NAOep@dfLzn z-^TuemZgOJwxl>b9%UUCY?eK-#bX{4he)jj=b-yg3jh}NCE5RiJn@H7#20(Drlv>| z&HR-G3RM?z{9=?3%VfmJ)ATP`D9?dN#-vN53X6+e-){hsfsD7)A$k!12R!>L9gfF> zIH0glaY}`XZW!JAwwM+$v&o-}G^sDZW41k${rimUVE8=OHhN~lk=~%27~L;g=rRE~ zXit+huJR z%hPmr;{xN?^PgAm*Czc-b)NPceuTiSzEyDrMI zgb3Ld>ZwFKXTfy}K-Kz-gDlsZaR0E-7sXn-?gIO!wka$e-}_F-03xWwDhErNN_z<6 zHNX@TJIzZFmkBUl6YObj_F&*4Cbi!DPeX5UMjib9VL>*fnzl$SG%<92E@c_%YB<+iG)Dt_^B`)qqe z=jcO1Hs-I1;3JKzYv<*NNF(b)gGI>G@cT!t3zGyaK$fx$tB_Zk$IBQTq zn2c7?DmC7l+qvBogb=2e_K8~3;T2GX+UU8IHh{;Q7{?c6H3v+Wkx&ZP5FF^Z`{;}lxGvCsnB+ZGFHETR$V}(Fdx)i0_T5`>_Wln5$v`&02aK|q zoRZ@!`xSgwXOo{dGdUVJio0)@!;Al#*t>ou>R+3%pwr+z0Avd=*BMC8(I>CZre}L@ zIbXK>4VB7T2Ys_v7i%6Gqfof>hY!@Le8bG;Sc??oqauCU+Va_cau#&LyB9$hxdvw~ zrNZQ9pbGccrE(=zCWeh|A9Ml6DVF7m!{b)h2nN?g2s`Ss4#=Gdy==^gfiEpm)@>w& z>m){?e^OB9=2!dTUc;lHIroVA{5pH)x61OozbCq!F@Lb}teDykv$AZ1rEwgt 
z?5+>WwHJTGHr2O{HkZ^bQB=mCcqzu_>9xO$YUk_VfFzy&s<76JLn@;!cZ6m(#{>)G zC|x7f0U3r*^>Y!3R7ADHHdGK7r7P7xMFH@HTSH`l{j}Afu^nfIAAqha=ruT(?0F(A z+aGNOcVI>4gE&qr6}-kfiw^JumoOG@F`cE}nUH((7-H_^mL_6LRvyu=>`9oM051s1 zhbK^_%T-yP|Glz2<*%%->Z*{x`4^pQ!qYASud@g<*7p>k53s|HyKk3!U;b?(@BU3{ zH;q!rDbpsIfgJ5>{G%H3Ta(A(M7NIr~5K1bz~YF%G(T91oD+&b>jeeTtbbT z;+lwFxlxu+`vwlLXC z1|Rnv2YWyX2rAQXp3Yy}IriwnbbPdvvIV3FrnOEk8UX^2oiFN`euDKgbhtWS?Z|n? zCffV#X;YE?8Vk1l11;i&mWjwbeCnKBMja?pO$g*nNmV8THUY?o31}6T4#e;sLQjRw zF@GG);k=BP3nSF&268!ajj(K}A}x-DC!ZCG_0=-HHTcPFeNW%>zDJfPe;LUZPj}5( z5i2)DXc)Tg<}49;Ke*3l90VUoaj>jRMS!{BplH5LI~fsUeXRsM!Y*tl88m( zAtc)Ns77NLtO-WnvpQA|3zFPO)gjsS50&N5{g^CI`HEB;=WjWu5NYEBI)QdPowj;T zMY%D(7)cvEz2|m0zU-Uj_|8{Ef6#xI7c|iooI@-eI+pj?(Qd^`Xgqn8digj=WxJ;l zlM+rl6B5y2*Z~KxC~Lvs%FS~2=YK-<6TeWT&cfp16S)2Wy91-;c$h|yrNC?S zP#6)=pM;|lv3ou!hnIen*sZ@FJgG~ng~+p=mdnk-z;bHVn5Nn<9qKDzQZh`zv8af+ zgV*9-_V~3R5cz914Sp-QqbVE&NybuCOKE9hW4XWP%1M6-F{{y3vZ@v8@BZ z2YAGOhP&w$T}!Ocfk^^*FpVhOU%)gUPfjjia-HF8SC4!dz zp+~o|?;Gt`A1wN3e@ygaKZA42#%9aB4*cfmf1DoLO6>xyrZI>I%8H2C-M7o(rQa%c z>#vK5!A2mPJVBx=YP{xFiTOwY{Wt46kaTzWOo%H0FVZw%3q&m{LT>~vvOt(Hy*{m> z9wd_tv7(!vaIwGGadyg_b)oa-r0GGZb4w)^WFaDQB_RbjNI5H#>k>2#K2;*p~%fp|MT`2I6^UgYjifIj2z)q(1eqsjc z6|{Kw9dhjzFAU?4aRmeArASpfa9WV{Z4iA&f_F3y=N3PK8qfR3TH8ApsHT0rW?-9q zt+?gX=Sc$_3U|Hmc3PxYLY{!rMj`Tn>*W9%tGza^l+-1IS6oBn)n4uof=QUg60ZoxSIMvU=jHS#)cTIcJQSl<< zdyuvFFOIqD+BBQK-$ZNHTSJpfip|Q?+nx8^Mf8Q%jf@s=)<30 zC`n6JmLC|3!ta9xpaui2Q_dPcP`0;@5+CorT@Ek*HnDfTLgJt)(!%wj*zg0oiw9*{ zYaIJIczQppPG!sglb>k{^`6U_Q9MQ_jEbqUJ24v;GLU*Jly>az3y(I-YP(qnmJ{z? 
ze{qup=~UO&ni#2K66DAX?4pi1qhBVll%JL=o5-JaC3f|S3co>Z?B>VDv(FjQNDR8b zvgYhq%dMLHjvRC2*kfrS(sBE22NjBgrhzmp*|)D;!6Yj?`=SHDy2oi7$&?;grOC`?Rps6`YVc@j@G zOA7gVG)>Jwe%89utQAm>+a{JTaVj2N)j8dBr5o4Z;G#;8+SX*ohj=PAifx;~MgrCn z&0g(sFz;(tqm>d!s;XmA8M5~rKwOO5ftMOcGeVAFfKkhvO>hr&?q*|XoEy?=boK63 zM?pF)Gz3zekrVEULT4xE`vv@FMZK~KUDarz3A?xeyGtpIH?+TLAG;SahkN*6h9 zI_B@)@FUFlOoJ(%u%L@yylOBAh&F+%qdd`(-a!xp0@iK16%=X;7rOmm9ZlDR{j&eK+Znrzgvxe>ZS?}`lXH`_J#-D_~|H~ zP2IK7FeaacCn93^+#&bA?3-8PcPOT;GJf>e+7^a23sNI8ZuuK-`3+4$CoJ zkm{9=smMC3w6je#R1Xj-I(k7NNdj>y0v0d^|}?Npon3pLtYl^ zMms6cFoooGlz2YnZ*f23LbRi$Zhh2e%JNx1EPD09I}BW}MNVB|@hm@g@jmL^9FzZS zJ9TbJ&%Ga!dtde~V!p9Q>e&e!m)cH33nfs?2(tPQ3FTRN$17iDt1-qD-J56Nr+P9(Il$f>;d1u`olv5dKJ2!5Kur z8ehMQ;;?1_!+|-HhJ;aWyY;v)7X7@R6}|3(GaKpUC(PQiSex%54cl*VSO;cZH(q(Pb7Uy4oHp2YYDKWPj!}e|&}`_??k#oBXcy-O zV%;SD@|PSEB8B`fW*;E6bSpnv9*vj#EP(0~>=w`kLZxOn6qWiV{c6jp_l({b_-|=d z5rNPY9K44?LT8VphItBxw&MYL3JRfVGYwW)OrLBMCVHMRcO7~>p?JVv*`P;9f&c|_ zZIuNTnd~;_kmEMoPiD4?$RhfbuaL9P_(9RL8%gn5DA@!$hJ340mLo@NMc1pZB3js_ z@Vi{QTkJQ#OYE)xtK`6AIkzD4*A+gQ<{6F(ztLN-1b`X5I3(qZ1*_-V;;ItxE)G(A zeKG(5Xqh6S|FLb100LFu7+W0|9sLNxiiOTwnnS5YsGg?t1}13pkSHs7-CGH-y*E~( zs-lVn|C1e=+xVd;iOiIqyMoLL!fgz} zdHUo7rsy2p36rc{HTQPow%tlhKu8aC^NNXxEO>|`_@8pJBMsId7})Tn6~t?hN8T!S-OZw({7uj=Er#sYq5)gE&rQ>q zPtJF<6-in~AH>+PTXd7z5-~8_G`+H&Qgh7d#UOMMJKQUF+v~(W@P{IIzMDtpRbu9$ zzBvf}z3=CUw^-A-z~<;9Ze_Z#yEAcQJ-=(QYODnR(>Sv;Wsq055@d55-%uhYVAU2U z0DmP;+L}m>x=Q%1Z!HxdruBJy6?k|dk21=_MWcWYU>Z=pZNN?tFq*<(E5LI9?BcMT zZS>=>w=%v6V+}yp%kuocFUu!=rI;>#cby8Ix{DyTs{&A2;X~~JMl1hb^fBnggYIsA ztmt#TCnGCWoJf?=bWoTm5G1wqu!aw(tY&*_2=!Tzujq}f&~uS-Ew^U%jEEe zpA~t}t3>3GgO|g3{lueC;ha*-x-}i4v#XxC14PqwAR((Z%I13cS}LTi6rH%D2*dh5Bo4+xB(-Ki4=!;DJAsmL>i_7ek;PS zd(z>&nN1pX&@$auj_4Bf-@e*%oh+aFy|O&@t3>3C6pJ|mnysA#zQLVCw-CzeMO$}J zX2#0MU|c-%JI%V)r?80Y2>mu32?cgk#nkDWBbi8hHXUd$UlBe~bdw)C^%(j?;j4ch6*Qn#v_u6Bt>mI4(^ zi8N*u+@NkOC(dayb)lnUs;L+_hKypB2WV=E!6UV!zELRJ+FRO5(&;Qjbh#>LpY)Y- z_WA!p>;WHFT(@w(OluJ&Q44y-2t*n>ziw*V(|L*1bA5U~2(8)WR8NqHy{KM4TSBg` 
zq#ySy_C5)pS+NB24sSn1P5(o48bw%Aqy)AYhcA#dWNgarnB<_*%y2xJxbe>4kW%KR zlR+7l)Aqp^w`G6Xr{(eg$!7d{j3Vz9;1FKDJ$|!+Pwpi5mQ05qRFMc3g$b#G46#R9 zN`U{FrjkFitg&3qD(LX@C`a|FsAkpFC!M?mW*AXOWO>B12R+j;?U%{T@&Y)7>U`4s%xnVH=R$t)eO5qvy+PN#Hl?mr;vDV<#5LrOE|RO z>+=jKva<-idU4+Q<#bWsOj1Rm?u|}I9i(t$SoD=`g-No=@{Dg1x#3~CORPAPwYi>h z;2|CnaJGlYfSo>ac4cX4xjZQ@IFY|KuyRhjP-r+xX5ItZd16Z2z3M0 z++h2v;%_SgN#(M|@zQ*b18d7D1?@uH9a%MChF^fVFCBcC~k-LKyX@hI#* zW4{dniXy2%ooFmv4rRdTgnrgpRTck1(T;k}FnSU(AsUy-w$-w|@4z~8;!~=XazRBV zon4i)PyYIX9yA<084o%|=_6F(B=JFSQ*_-a8U@j#PB3NWe{oBbFq@HXWPip&=_ z!)msQ{o3XR)P#qCkk`IB3?IcPP5p3$z=v9{%YCz60&Juv3Fnc&5r$1fKlaaxUjGni z+#pJ;)P~mGeEYljHiAJfL=5(cQ_m?~o3*yCz?=pc&qEMy5!$sXNIAOvp&vPOq_Su` z5X4<#<2rc3@;WATmFVTGws<+H9?45KYEuB+6h@&O$AgXnY9wtx#CMBUeGw4`6#e+m zsj869q~yJM4!e(X1CkhgWYVBPN0nlbt`d-nM=VU zziYsBzUt{tYgtS+Qcs+?J+y?OuZcb23sEOwO-R1dlgTGrdBAT55|%LT*2;)ku&O%> zN*PQ^i8>( zQ0jA@a69q@kEnt<^vX-hL8IR~u-tj$r@#3U(;RrJr|}pV3hv%TCq`Q>hz8J?vT=Y? z{^AXTCt{qI;K|ZnuIQ_y5Ha7Wr=mAKY76#^aQyi`BY+>%+0-$i4af?LmQh>^JWf~& z9mjlreR1ln`AY5it&n2C{`%Y|tZf#-+W1S2LfYR8(8#Ep98jm=_X+AizvH=JK+T;t zZfDlwk*Y*qGAOhl<2Id4fRP=NR0TlSy=uIkU6-_gV0rST)LXhGg5ZY-n+$%V!6uCx zolL2Q(0v4X3G2TqR1&j1cpiQfG+?4TCTQPUt)=5ahBl=^Q4}I<){}Zro2n@p##z1k zfX*?}TObr7hC69*G=OaIr5q@_MT^~;YvV28$g(7*;Z#919Rt~dD!~et$9?2V%6ixi z>Yd+N%CAk1K`|Q8zUKq(Q?Qn{hXt?{Ia7{3fR97?rn_43|VV_VQ}VlC*F zzSlGoS_fBCJtH7E3dEMmiO>aoUpcE`!HC)tBE#nVVTGdFiu1`SbI;?%8DeS*m&@}} z<25(hRXG(J&*UL0ICVy|Z!3z-{w-!A_uiiTjg@n<Z+eLm4bB17zwXM(WnaEbNw>V z0iwGIP}ab-WR7P-i;)|=tmBP6XQ)#ZO27E+;<+TxNz21Ky+ve;cMgoCX^IS*mr=X+ zg{Mt6h1k5PD~^5WPa+pY=YS{Z=JNoTK@AX^HV@}?Ikuj(1)tr@rP2EV8WQXJBdu_6 z1Gyld9;*W>>;Zzs1wk`xuM@REYtfJHL9E?FR~>Y;T~`iy$sKOL9mQ_@LnsDvi!kMX zVO8kld2to%20ifH32pjDul1}9Ahu*MFkx*>9V~k)bLq0Qs+)t+AipCx*GpIkgf31< z*^&`nQa6Q}GMY`vNVj+~MU3NzQEv4Tw&spwxu{7Al&oc1zOdERYztS@RY(Z;-}M{Z zJB*+ZmED208J&3{dvgSlDtOx_6vYXa7h7-#6;peJ82vOIpr`4OAL6;zG3`{&_a~I7 z2g+2tjk5;mTqd0?c?Wea^dLhc(^g^nM{gDT;2)-fgp)<)X#SxWMC=HvY#C-6QwbRn 
zl-@MEwGDYH@w+RCkzVLnX474Tx+sS#%CtQZV*s?*7^Ma$@?wKIsHQ^ayuoAsg#x=+ zns&vJKKzVC8s_f1tR=%3K~e-Z@ZOkh;h`v+Fwx%niz4^j4&>%G(Ht8?d}Bkq9;Km; zRwxxRBRW+<^Z8@|4rv3baQ|uHTM;kyU_eyO(ZIc~ewO?buA|aChz5Qw(AWQh2ckqU z@%*Z9Z~0$EkM~5MqP8vCs*32bGP4eu_2`WsI9jJbn1O=dEZbe7vsQL{PQ`TM%}5X-)p*n@xZi#i>9(-5onT!ZYrESiz)d z83=TUOd97CROod4&|BsBmVcMzI-Ou>3FQ*<@9>-=XsUWSI=$;b@#@Ar1}BSxLASJ_ zN}@S9u2?suGr~$%4Y{qha}Kznp)?EpCCSx5u;<1L+%{Bh&*Sg8U5QA<@eiv z6~Z9z(T!JE*GR*13PpE;9?uo+zy1L=18D;1V%cxfmrVvL1V{$vCWKhWxhmBY4D!f* zMp_ zGuI*qj@?X^_Zgr*f7OGpiR!XdTLUIrXTo!p#YdxQaB+|Mz^YhKQgcaRL+y~t2y*Dw z!HloKm_|X_aVrpWQmq!pa`m z4FR0B?VeWxTkxLb=?JLAlkwoI$n})7UPR_!9vCe=T$9E8IFA4Ob}6F zl0?ByMHCPE9wpA-moU6Zc`3>jZH)8UDtoO&6y65Pcy?A8R}O+JC<;vKAQbWGF>$(E z16C-&(Yp_8I_*8Ll4~#d>ms-P0sIDsECMgWKuf!*BiZ_ng2hekw?JMdGIWmRg4m)Q zwaN-a&2@*=?nVboSgv#z`eI*!a(r%b5Mlm$Mb$AL?~%i6|68$F|06iKcJK0xRH zkd&YeFae3k7i<8I#QfBGR2LH9$*jnoE#KQ0Hp&6EONYe>>~<+44&dpOM(Ug zZk?$)OH_i!lJ(kH*Fe>P-r{Jsa2zCe z8Fl+x>U8H9H((<|r{l3j%F);|AbcY@3}*cZ}>?$ z{NXRi^023gKK$w1>%V=W$>xpiP!ge7=`KtOvbj1_FO^OU#{~=y%S4q6mtof=7C1$O zs8Ix!nHLbDapV*eBEPMcIO`AOvn_I3eKWV|uLGwz38y%PKgYR5Z*%*4{LmlE@z&oG z`M_&q3e`vW&6+ZC`_5TKHFABm{(y5Nm|qHPEXb9ELmmychI5L5s=XJ7@MmA+VL0%& zTqk*GOY*88=ANxy$z+qM?b{<&^tzj|brF&6Xy;Ae=m3T?c^3(JnsUA@QCT`#7+Vz5 zpRp}bTK_blMO9JnAmF&l<>4m4COD=UKCXWccZ$9D)ne~`6&!@?&P`GfDoj13u*W!` zdXREK0HactpU}V0HMx4}PzKJ8mvyQN;XL}=qqh++-1KJ=m!LvtyGpugz&EeAUPwK5 z)U9_q*1ZZnOdj;7vM`ZXo}N3C!hT`vS0Lxkg(x4@LJixBE!W|cUquo!Agzt_E~AG$ zS>)_$I5axu-3CKYktIhVaBVs&N|K-te}>c(cZq?9a5II*fgPg(Cr~-S#HX|3R1L1T zrj>k(ij_sD?4YiA4vMs-wJ@)W9=jzy;WaEtM6BznOWXtQVLoZelj`vidDh<)t8x?v z?HkI}00Wt~rPxxcZ&`M#G2oi}5HMk>yY$xV0P1!&V2~|=#?M8h6N{>e<1?y&9Lhp0 z11&^(fCEYmE4qL(RwJchum=4jfC}dph=?sKoR8zkU8f5=@;=q`#}w`xw1Ens4}Frf z653lcW3p`CyQKnGeiZnBrCk2?6DiCY|7-q!;TCacS2R+)LN$x zodpR;8Hf1_I#lZ5*(Qe_*Q=b%_ueTTWzKGtk9);E?aF7=GY`ICiF&iUGP2dElw?FIk<;rQ+hunwidpGp^G>wEI|T-Ryr*_s zmCR^@Bm^(PvK6rtw|DBK=+?eJ<0037L^_~Xbq!q7pZwv)C%`YuubjakDu8z@KwEJ{ zHI8FMvl?lrZ9Kxi89j3_Z?#j5;U&!pb(@T{Um3qMHJ~)4G(jSzAGLo&hh6q{*Kr6u 
z76RKsRH`>UO7xMRPNzs$a5AlEC%;qY^~w8>bYnU6Gv=5xC$t#POceem0C)}_I7nyDOxCXmf)Und?fSN zjDT(T9rP%kGnY#xgqdsG%DOh~SJeTJw8pL0u-<{zIDjleSJV+o_M07jI2m5VBD;s0{Nfa@cMErD9+AAM@zH<;;D+HEUOA0qpgqsJdrCma?oy7b-6Bx zJmXob#U%83dE%FdKIjRhLMZQ1Oi>v<>HyOn?FLLvgYcud5~4N_)#%Gb6X@x3fW*tx z3lT;=JhXaEZx!EE<2n^*3Mki{kT9J=Ti4a!_cZuwXb=pF9o({Ohvw+4 zTBvj34&Qn0Y45)X%o;flnc0!z6&as8?oD=D#rA6KO@1iV*A_9Eh#&hmx3N;Zlz`%& z991NO@n?2i2JJV764nJ*5+>2B50bN|{WZx=cRN=LMg;+6Mp)>?>+E`2p8XwS%XO05 zYPni^-g6(4c7m&EXO$advWv^KU4h72MFYqs$Sp@DH{n9JV7lj+D6;B#Gtx#!D>b0# zTV$yCFG?pg?-9xGR<9#$fE3{i2-QK2t#}>$Y*u<-7*5ZDb2kqRaBqu#*4pDC&panW z<97m714mj9oD~BDGy5)5bTX^)m0(?Gjzp%Ql6{^Ni=wew`enrqkrSw2AZvZv*NHyp zarp$*iF!q0UPL5M1*SMx45ePnWB!6HPxun?+^#OgC6*HaaE?wdFaR$oDG^-$SOvew zV;+owtdDm5)yVFBM=2|`W?UDC;`~oDZ|!k-ql{3DN8uV>t+awW!<6bozSPfCsd^`e zr1)=+z7I@R96^9q6kI~&{cE{AC={ZvVIMJq6{p{vw>xX@MFm%07rGMzdG^47%`@uS zi>`dwMh%D4S8%Lt8!m^1mDN~3Kc}}mOO{XmdI=l05JT2uscYE6x*ITZEl?I>pnDZ) zcvu(gBo|w9w$}etIKONwG=2 zN4;WR@0d^f9`i(zKDlh1P=@%($@wVGt?yQi4QCvBLKx{x3T|I4B7@d<8RK2wv%vk> zICcF)W%)BdAbRBi!E0KJ1r$!?srf8`NfA7jLb$R0YASO5gXPL+{GiB<50{?1>?GUm z*Y5RVQN#EK3WXqT)T4N>o}Y*c?9PM(L8GIDR68`Cy0d?+KJ!_KALO38qR)EILPoXl zXr@WUgUv~*-=3djDRWP z9CrxZ;m*bmm9F}In3zIFm|-;TbvkFPi)|QXO`3~naJ7-9=l4-u&R&^xWGA*gZ`Fl& zF(PNRmSRfja@4a4oMTAeG?HbsR>Mo8x9eD;d`XFvSqsA(xER}bjm`Ypp|$RXR{~;x zaQtL&8Hs8tgYx#3~5{MjE9ee?^W z)SiQ@5x=^bh1vxcB;m!#=(SiX`tWDSl`r_0BDXvjV0pffkq0llN}V-!GxA;kT8?va z%!v$b-6l-w!;$<%Z6}3mq^#bSs>C6)b&R6;q~u$<=NXE@bCmZ@q9jkC<$V8%RT})R z{6t={3(Tdx{(EL8IXb}8t95mI%3Mpm087%3Hz8>GytEv==|-ewKEbL~z!zV0XG@cN$?z4jsU_{ca{gkvbpseHC7M9YHsx9zR3N^8`s z1aUwpy>!O%FmvLEhz!&YHL0@vce__=Qo%`pP=q*i)2h*(s&DV}&U1AS!`QdZ1H)Gt zVTpM4San)koz9;;4YuZ;bKj7dJoQvK9lVId@RO&F7EU>=hBUTj(h!psc_L9dpGrFO z#O_3rj@I^&+`7@{sw|KDOR_xo?~2~|h*9o7eID$xb9PCnh#roIl5nxBy~tJzypFg2 zmKlCyRYb{OhD<~gOUy*2+!~w?r0MREl#q8WjeiCv=D^5 zKuJAfOhTYtN%er}N6$qk^ZD*{xxq;$CwV0zM{|WS@d46tv5L;mbKNY_*@&kaioudo zsV?tSy@;Onmw)%G28O(EvO?t8$B8FZp@*-=?1RldQeV3@^`rz_a2X;Gev&ND|NEjJ 
z`&pvPSwemYvc4N~LbYQkaNm~*fTf+FSNgaJ!68>#5SE!7-};Mke9e!F-1hrX4r@mb z*#&kHR8VPhL*?`JdX6_`xgPiXd43uZva-N;SykuNd1ZbUA{w^nCb(4E^PuYl@6bhfM|)!SgC~Fe2vQB z1ojEu7#&}(tYaNasmP5F7k$!~%kuPZ7P)#89PwS{)DiM1`81K7p1*cDoVyny8!AeW zAyfv-5xeIjVsHNch`sJ7#Xj;55l|TfST$?zn^p@DBmIF$3UChD?wgj&%TlC%YC>{q zEcx7!D!I>&mZ0h)^zy^RZijJhAR)kv)lt{rArfKOx_-Lp2XOg3q%#ZG z8pi2+MU3olDfz}Rfc*{+91n`O{Lzvn(JL=^=Lh8Y`k$2JoBrP-cfMa@1f4K%X}6$f zo9Z^DZUvSg?JFsVM`S$k16`v7sbU+^WOX9QgF(ns+^aD-6iM?;0%A**a1-gCw7_%K z`&*w2;@;JP*#s~`i{$jHP)s=gV$M)o!1d=nqpG7mzY%aP&ulyo>HLRy*Ubx zbxEnu>8raGK;UOLh&w;JZ2t>?dH|wXUctz-| zFA{%Hym2_mv?UI~$VufXIop6cr;M1sjUiwno}qAw2c@uk!2Z*Em`jCIMe%oO5CH-3 z&GXnlqBsvIQ!X^YV{R8gqqdrdi*he7$YE-$x!gOEXgu=MT=9AEW=V()tmH0OTt1SIZ#lN@_6 zzRg88J!bDb`A_!a;ITV)-{W^HN5)+`gwR!r;KwHSlG3L9TMt59TVDz+6bSgC>m^sd0GR0c(`s zc5B_y<43Y5)aZJeejru5*vYeeA}D1R=hcb@62s zNfLBW?c5shI{yIdee##d@{Dg4z2Tv|GVi?0 znIv3G#0P{imwLzZbDXsqgMesA2rB~VrI$@7crIRQ_ON%^ZND$a*ZfnlxBq*Q!#!(< zTH>X=Xd=2)L$IC-RQk!o?K=$Zgr4B&`A%@J#M&3{f3K4rL;`X>xBg9b%TlEqDFJM9mMoh$r(;x4X&m#^WN-lIGa2ul6F$hO(Bp$TBPz3_9N+b8a{R6T zLhOTY?3L{%#amqi<_VDj_*kE+`3cr{9NJ?;oVkO2#R$>U3p@~*f&Xa*W(U0?fo4B+ znD}Xl4t9DzASk>DBvk$7j-K2nUEA&t@1(FT^2fp(>O_w`u77c$uwaI5aj1*SqaJ+A zFh5WF8P5cO;ITaXnWE48F40GRx~QJ*SX5b(9mq$|kw+%Csd;^1mt_ljx;lV?BomP( zC9AEYJT_o|3V3Cc75y4B4`}vN07Nc z7fqOZI)b3Bx(^A&wbKG9aJMgx5ZoG*~+UqvVm-o(KXn@jgOG!*)J4*;+Kk^U4J6UJKdRmUX|by&vM$T zCmcy%3S9E^i>8$vL?JQ_JzI&LV56o(FcJCi+vWJ0A6eh`W(Q)tislO-==i5fa_t0) zpvGTRg`w*g;s-{Jx|MUQq^vBeYpE{G>~nmO8O*>;vI6Iu5v@krjFol~W$50b5P9kz z-oL1W6{c+$NO{01r~q>Mm?fD=#k_xWe2nX0FTs@?WO?$Ji$48th~D(E7hqN06kYMa z6v^4Cw(N(1`O9-1Q+=&%d4RaqQtjNWk4@(XM4%4lLC*IkO2ZzJKSfh4KA!@!44#I}-RX~HMHhis= z-#*+e$3Oa4a(K;;iroEvNfyis)}jJ*nz60#PP}gkMG9>Ks)Mn^$9*7i4#a3bqsK&R z-g9*Opqic8Ru)s)VBZNEeI$MdL{8%lEIVmGnB>{@4S`$)k%5)-=0yKqF~1i5 zl;=eZ9TgIjEQG~0x+G$EzE_T~`>A#Pz~OE=xrRLisCz#S_iC?7QhtG}*Ua8;WXDDP z>XbIM~v;J9nRT0ZUZd3ts&yBhpj!x3uWnu>B2Lp>>4DpnUA zLGEm5R{I&3U3q{kPx&fYp7D($S0B1Qj#-`v>@@BCnq%Uyd?#ezOOC0?h$DIrhP?p$ 
zgA;W+Zx^X*d8l<_XbF9bfm(YQrNmXl-uGHLyz;*myY<&a_354u%JKJqL5{!o6C#I?g7Ob%91>l+PIYz($?S1ZBu|l_#ihtq! zy*`Z2k#agO&U?+bFa>4TEmpO&v8hKVdV#|51$C=$2au{;O0iXl4u+JEo-UrY0)$B{ z_#9?xskZ$I{Rnq}VI&iF z*5fl3g*lNbn?-98ASzrPMPTF;OMIC0q! z+SiFZ=&@7>!|sVT)(yWmpG;ZSZjjSJ#O(S%YgnJ&E@crE#X11uRPiLkZznI>UkMgd zBR3#6^yCdjO|A>_Io_;xl;gX9LyoWh$71jO9SPIfn`|Gu)6JV7sJ~pK()9kPa$i&` zR9tdm2vSXQ2Uvry*bM##EetlX`nrbEmcEU*c~Xvcz7<$@`VH86w5#_w}wL z!Csz_cuoUO@L_;9duiw>i^7Fdsl~351CtBr*t<+cdo#p6hVdOfuZG)3W6YPt6i5!W#;rhD7r*8) z>Pm6acabp`d_E)iQE;xDd&|vX0E(6$^f=LHe!J-7{-Vfo9i=TG>YYCBqu15=5^?VJ zavwkIXpWrLNO2arTd!U_UbEcn<996wR0v9-Z1^{u3>4BV@&`rM6!6!E~0m+!=%ib*66H|PSnjN?x7j}5#$VxRLNqF zGBZ5xoG{=h;|R<6wOw^Iei5N*y9wv)o{xn3r#An7;(W3By_(Q(G>BM8Q^Cikh&Mb; z^r>Gb%TvBW?D~g@7U?zv0>s)Akii6t-dE%2{iiK|m%pJrxqD zr&zKV>|F?OzTH|5qjHx$abR$L?X*>lz-S*IdYc@6=ZEC@C;yx1@m{D09<1D+#JIaA zV9Hwgv;k&x!Kg#j50{}Tb`)8(qJkm%pOey5sTn})*$1cN>NU=&k>a$7NIi8a)3r_ytU%~}+Pk0-&Ce(Ob2WgD$|L)4G?*UGAhha=ir(@ZS)TQ6vV7d1 z6VoNtzIZW?Je&a zv2y8ecRUjVC#GW~z=t97YN@YoC_TN5n8D%<*ovC_Y0ktJ6hInKZ*S+A^;TvFziiG+ zZ~9nSp8cJ&JpPMC&aNBIabXz{SGN=`5o}+NFVgSrnR0M_lYK3b?OzP)Cy#8b&vPF* z6cjzwgj+;MW$`{&aRW?Nna+*GrvRh#)k_!8m#W*w(P+HkYR2*l&dH=UBPq& zQ{S;2W-<@o_EemYK_t&+b@dRL2BZ6iexq?VOXp9tB!1_nLpx)gqH&)w6=GTe|=-= zEJLt59je?Dg_Jsd zqRl2*bkYNqION}r@~2V(;6n+lGSp~LS9dfg1SjDD@XbJAxb+YL7%V$~nCvY(9Y8QbLdC*vPdDv?|R&>+@vb*SJVdwQ~;uM&6|HQI)PHOjd;vn1om zn)lOrufkFV7Rps&Vqr>1NE7M+03yFhL_t)X?Y|#!Otu%|?0WVA zULWs~!yEp&*c*OYn<<=e(YrCLtP#-(V$>!3w_(-r(pHEAje@$1%!O!1BGQjy*g_S8059pO= z9Gzgwvd8dqP~z^hDq?rMbwvYz{BK37S?e6M@$?bTCE z>U}Oe^vIN(4IbYLukM_~$8*W;HD=r6v)n!(Ydd^)|Di9#_O{IJaAVQre2YhaJ+rQH zw#QZt>(6N98Id=C&J+8p4NktZ^7GKI4J(~^vi$qxK9zPoy4*SE11wGVB^ZF?9HzKECfKG#qgKNPM?zrMEO+*0cJI-NT=p3--^q zqrs+HV|H~|-8EB{_#Hvlzgy039edZa(zMC7-3tZnnKjSG?#;X8oPmYv&hnTS6JIpx zZHY`y<{S1@GRgFJzR;j2gR73%dM;nteZ$ z`>AE?*Mp0ik?ZF_t@C2@lElw9Ds{D=vu*vk%S*zxFUqm+N}&-KBknj4e&bxO?7rp~-i#>F zY1xY&xw>~7Z8>z~(@$rdcae`9{N>=0n)hg%r~e1iTl$>1Wn6scindI`nS`p4qwdV9#zYI#`0(5I#qo) 
zXHosz9yy-7#ysnO`qApdOxap3+uwRt8LxifBWk$VjJt88RE*o(zO&n|I+U1k?do%l zUsd*a-{SIu4QGd@geL!Zbau7>ZU1^Ro1S$q*6(V_(NjaOuWecHQ&^(=` zWA?{|-7Wl_Z2ROcV|R6&rjY;l@R*GeU2a$YSoHoCr=>BGogY~=_Hx_Wq-^y5mP7X2 zR$6>ylJyLab=S3F1LN{Bc;=8}fwAT<~MS&iGk-{fFONeEI%Kk7h5+7W7zG`Ar*JujPXd{K)Fk zWX8}w4QFN^8rOfp`Pj#^PfV)j9?>djdD+fQUVP2r|J_m>me}=H#Ha#$yk9^6=5A9p z{PEmrmvv>u)s6Wqi>Ehn+Yk6GAH)#5Tn&k^Lz7&49P1R93 zvgOKUG4R_4>qQ;D`$TVikn?)nvuYOF&eu#Vl143V)pO|$`?ojSo_$-@IU!oxE-Ko3 zSzMtLf#o)9{GF!vE7R!V!l;sF#TMLOUhnam=+MJ+JGkUITYUc`bE~Jj_5^)N&N};8 z#J*sY)T7%QCpEM!^VP<3}fycOwH!qr&zgtI2Z1A)ipNH_pP7K z_HLKAfAEmNnHQ!kyyuvbb9ugbVasAWORjF*=<&c~dtTJ)J0`eNa>PmVw*_yL2jx=+-0ka8?qjh< z)$-JxGW}(yF3Wn2nYCwh>t=_?UCg@WQL+1(9J2VQTy9>%-vQlpTM=dP4|@H{epLAU+&9A3PSDc{-lN&M@OPxs>Al!#iC=Sx7hRZfX@ou@{x zUOp?ff4O=En|ZDwzZ-;ftRL{aTcV4HZNHQEyKXmG?e;RZ{M+i<_=b5+io^y)7U}7_ zOQ4L)(p4xCP z#>DsFigsOdIkkP@TKvoP%*`9MKK5m7VokrWwY%O-e|Dh8;l*x9ePmvukx(XQa%<(u6Nm|QF- zdT_hFT`OeIQc_#adijCJSEIAo??|3rwdkRdP0zN4U%1IR+z-j=NMklxudmpua>Hpg zE9`ABtL8WI8)zNJ-Yr*Kjj^t5o#%d_$(P~77XDaa>323cWXa_xB@eXf@l&NU5jDe3e4m%?+RPstcirC{ylnKPwg;cLp7!^XM&7S( zB|U1lvF6=Q2{YXes5OzM3tQt2WD@!v&-$)mEL#CbD)&(>$rJEE!JJVKmT!~nCPpOzMR^A zp|oe?w(GYY`uz8`_e)+|Ig^ieJIy{qp0c}8NOPyJf5@W|2g)z9HNQ6OMr!X{K3%dT z**L`yxTUGE^2v}s-*32g@H*4V{lKw41G8IOeYd&2DWd+%ayQI9t#|fn751>`iwEP5 zWo-A!dPmpTm;Gkn=+vxUt4U=q^>4Z9>t2UPt&I6n;#}D(CC`pBJ31$$=i98EYL87FklAm>_$-^_JR(D`P03m) zYFI_`ByYK(ms;2Qsj0V@gu9fi->$^YaI<>TCN4|08kF~R@5K{mKCc;-Az!_k1Hz{@ zyLUgSmVKoqW-cq*+B)xf^FCbDqDCd{rW2_1KsN3!`>sc{!(7Or@%Oj@oBF7;iH>Z0YG4z3dO=sMo1# zwrkx-OtHGOJgRO$PWMeg9;;jR$>LkrYux+1vsd-&=#b&`hj*~t{;6jUHdrxWQ8(*yE+u?jJO>^SGO zJ?l=3p86#2c<*ext~DRL>)54IT`SC9ab!!)*_|wJ_c~Lg^YYA1BG))o@{ZWIW4y_< zEd}y@zn(Qik1SqUGG{!K(04$zMbN3#ouvl+%x^~yqAzGOz<+O8-NI{kUEh4^)R+p} zyIXeu_&9m(i?+wJAG0Ws!KR`0jQ(L4?fVZanDXTIh6vZ$z1|J(CWQG@-hGo;v`S~_;q;S))d2Omss5fnaTRQXKS*@`Dlp0;`- zIV`_ue5}XnMqgaxmmYQA*Uxp2>kIeDY5VeCkq1w%whJcbZ;h-O3)m-1ss@Xr%xUbs4I(}z=IEx*pl z=f9xr+MEFus~&O~_rk@e=B^;Kzv}FAyMJ?}>DLE4ckO#Od1uI-)H6ZPgM&YK&F%QY 
z?Z?4~MOHh;q^|tgF(_t6)|>u6J{`JWa)yOXm1+||ovTssX+&se7sruSgQ{$}_u01T z>mS2hLthq|8T>Y@Pmgb7FWcU*^p7%oIIjDORyN+@b(hE8>$$g~XNgjQJ*#KR+a$-w zm@D6^*%iKPUfkK`%X5cGC7-&kv< z&$)-~nKrkzSC&jOMz8-F8M&cxndj{rRf#BPWwt!nylKZ!4{iKV*C*ZVVx!7Cjc>Cq zHlIVa;t#LaUp>h_pk|l;T{lgM%)fTdvM0H!G+w)|^Nqs`wmIK03EtJN;?<3H?#ErP zRj9$rN>iuz4Q`s)Vn={|VJpr3DtAoYkxzHl>_6^efa|n53tTJ~*J(E)4Bv-|*u7xYvKG0r%Tz<$CH>;BYb^K1R-`+PEZ#0*;YEH2LP_{%ll zqE7#udiw8^K6!RGo7iEbZQTSL+r0BE`-ZuDUz+%Qz?Grfi~jKJ@n&q(qTiEUwpPwr zE#jP}`|8l}64_n_nH(?Bp>n>~`Qn$P+|{;Ubx50&EV3s~AJBHNUHKwmySLS=UALd} z;yT}RRvTJuy-hc=g0j5v<9y$Zt!Fi;x4QI}JboW{=P9&nrBlA!KGkey*88+O`RuF% ziyzN5o%VJ6hJ)UfiupaQ={s@hhl&9$>v~u>Y3kyB<X&OT@BWZO76HuLBEtzTNg zZrqVmsq03Ra#+>2uwS;_b3^8Z*>$SEVc4OFx+W(6uZz^59y(=Nlhmd~0={P$b;;cI zQ`14=pT63EZQ5t|2A>a(ZzBg(JNGSzgR^6o+v8HMY;o9RF}=R;a3fHHfF86s;wchvk zC6|P_ZS$!S!#4VxS)?GhYE)1t(no&)~M`S7M|-)g_&s!8=7n}jeR*G`@}JAPdM(3eE%+B ze$G>VZTDOV>g=1UDgQ7mYSf{x6PpZvR%A=9<8|WuHFQ{a|9fgsml4UA%P)`KM7|yw z1V52%OReqM_|B)N7cSMxaPMldkdtbQ@z;LRG_t*S({>N3za)%jCkwV>0v z?HB!QeZNzk;TJX?Yv~m*@|?-c*Nw`|cbM6~_r~IPe=gY?-+Nk#oQHDH4Q-#fQVyG( z3wC<7diFGAK!JTlFW>4hV^(OAUAb9yRpT3X`Oq-O;^4Y29qpe5hdUKMGNj?e!G$sh zhelb%oqtK5k(ga6&d0h){qVDH9|MLy_J1+Bdj+$TOZo@6&OG$u{(1Y@J*^kjxHoz3 z&g^gZY->FHb>f?R+rJld|GcEeiEsB~LYHRqIhECA>6a1{YlqCp)nGs#*QO7m9?V=D z*SduLHfs6u4~^;l^40ZxiX^u_AXV(eg2t?3pDq3cUzm-q}b`%TS9Y=I8*bx->fHRKDVFi zek5A~%e>879tnQeXIGxUY1u zmpVp%966(J!jn>WZS(pRoBpL^k1JnN*OYRa9uPczfF&7g9p0^KTCje2g=sO-{@w{^ zwhi;w+%+Q+8*J`TnLH>-J|>^j?1oF&AHS}vJhaQnMQ1q+89J3BA;yC{?Wor;C! 
z(AteSy0gNmpXL3M)?}(>y>#xBsY!9Z-q%mO(C*r{FLr#Rq5JOSniu#Wd3V_bVM9EN zeRw;x+tYCi&WvomrPo=HV-Fgh**VcHoA1+csjhGKwQ5v4G{f@sT|Z_YyX-@~2}|8_ zM*9xE^z79953dKzR%q4u>0?>uz1ey<42mnj*xRu9d+=b&4`Vdv&t%hcgx*2B9}TqoMSE9$lD zOlQk{W8AYo>C^97$yZ^A;sNQn2E;Gvhk_${n#zc$RS zSmTWiDtX!k9Ee;NWjSj4=v~gwiuEf{x`WsC?6W_N4cakrz{qC~i%)dC^5(0foOfSiM4(UasrjNxObooXD)7d@51D$+^AB&Ee~G!@NB8#~y|caC zw|HcsEGc709Srf<{%+NvyQ@w%Z&uf=X@s|#=lx19o^Ot<{T`ER*%GUtE+?~g{8(kO z^TNaurwX2#A89#qSg`>|ZZG%$Ftg2+b*34gdz5(@dEw@?iAPs;J8U|uheADbq{k=aQ4qO)VXy{3g8n4X0PX3YUc8QT2Czs1? zee`zEd)23g4xH*dE;P@jOt<%SepqO`OL86CmuG4Yue@viLyHHK7FdN2IlJ<~9P`Qr z-J5usxcQbkd^B?AyMj+{m#kf`Pw5QqgU?@Xno#cN;{_XAb>B4fYn~MQsctp;{?%q_ z22GUdgpKarW=}5dZW3It(!OF1+gL}{ucH~0uqT^+!7Y(Ly6$TGBqZAF(1%ZBD%GF+ zVaBNoR_+zDIX|pox~#NWtyWEQg?d}`f4KZ-)n*MJJ=oRk*!FKLgUUI!uljO-!zPb! z1>dao-K|vSi;lQQUDyHyAFO+)`rf~7<7B8$l4t&p2 z;$oH+4I+AoS$7a{hj{ zCJ#Pd%-QB)7Qgxh3qHx$D6nC}Z*@jA6Rnv$&0>` z-!kPHp4{=Af9`6z%lP^>4(NP4`>L@o?Mp}W|ML3$V%NGym%g=VXSd7r@ z#RRVGc6fZ{;2Ve(5FEQ$C=se z&E9bS-TFy?FKV=ZZ~L0XI>i>N zzP8z7-u}YRRu6AFV(Te?^JYE=E4b7Q{;Tn&$3=$@dhqOQT)fv`{VOeR7G#oA^kavM zo$3U%aOoV~@YUM&JtE3B+E&=T^Nmg9jfu;#Cv%qWnqytJ+}GxG?N_7D=w0oL4D>Ng z>AJQ;qhY6PHm;s>r%&|7(&tCkS$Ci`=bu{H{)c1j(E7vM zzS(rJ?4*4~iwDl>Hl*w{=P$YP1^npSvUJ(8Z%!90)8fz??~3zpw>9sPyu3%<#ktEY zUs=p!Mub~ln{|I>w#~A4N~`h(nh$#G_8@BawJI5=PHy$8{BzSBM`~I;C_lAl$z6%H zoSL6Ml(P0(+2pb9%;VhZXh$6z(&WI&2QB?e6$={lR@33`g2qD|CVL#ITlv(U>ps;= zE?+ydd(&JOBC1p^y6pJ%n8E(eUl-q-_+oCwg~?4yxc0m?^h>=@1>FX`I@P$#1HZ|8 zTT}`hm(SYl$dHufyPrh=?0%xrxl9K$jEcRw@G~9LBvQbrD zj%c;N|M&_^t6ksGs_dfogF}0~nbpdCUTBp~CK=aGZBe{*^|E(bh2LpjrOxM99=pm9 zv_0b7(az$|I(@Nz?^waoF>}UmiEGIZEv)9}y_B&-<%Sos=K8UusgKhP$D@9UUEGhqe>-j2tbJ#iRjhmX zdGk=mb4xDfvbt~9a>kN9L8G7cnO}JK$Q&0p_9zqgs%D+Awrz`V{#wg@PeI$yE0Q*R ziglZ`ao3IMHCmfa?YOz*yv}2?&s%$ILB|gMX6I_u{1g~_bVlwXc2GcLv2o&91yWKQhCnGM%W7@Bv_!-U-DZq|M}*);0#@EwlD zi`x&l`yt~zd;2NVO~wZP@TvAQG>2t!>X6#)o?O2lIC@q1*2nMqR~`OryQTG@qm!zf z_nFyZ&zzxF`@W65G&0u1?8mBuy-X+MnzHlM&o$>t1P5O5_j^{z+|g`O#kZ$h{mfx^ 
z{#*XY!(po}D-CPCWoXrJpBuPcul4BLoL9C_OyaU$Y5Q^Cgd1O~XTQFtpks2$A_+g< zpMJAr%aD7854>$O{AI2OfA4+&u1c1fWm`LC+IVpBk{#y;JsaM<%avD7?Huo|bV<&EkWP{*`}Pi))9c=}XUJGY_gTck8N0wdUQlFR{OCwv67jx|Xb) zJoe=2-skp*_bm0bdA=2^>+K7E;dVY?gxB|=%t09@9-r|vWoEy94xSwoF0|S@@5hHQ z6I0)lcij^gCLX@uVtk?7@yj(osz%n2N!a`RQLY2$R!o_i*Ed)Bhu3^;Up_uoXtOcfLsd2NyegwF*C0u)?z! zc{3NEvwlD^`yu7CcHC0reU875yxHFPQT(ZpyaRJj+8yhd%cZP!>wD|R+t+W?F1orl zJoVVb6(*If9P&m!{2CUuIrPe?*E@%-DUtuLvB%%d8E=_+WZRiu7SX;tj!(asu}+S; zQHxq+Snu$u>ZntN{dXk4sXN)ePxj;nCu&6pFMqLNs>i&;19Hb*y?1hNxz_nV6sz;r z>PY#Y2O4DW;9K%cy@C(Ujp$ypbx9M4x|v^x#npT~`O>ax(UYAVn^t!mm1Rco?Z)-J zOAZOy>we13Zh(ig-Oa#r*28i-n~d{+6ULPqv!&SOj=jw*W%FEeerN^F`PVtujhH;mzDCv2fqTP}O>A$R9v1P$ z&(ga}^6J~S;^P{H9N*mGN0|oCs()OW?RnnnbIx@#^X|O9QNY@&f0vGQ7*!_6!R~)Q zN*!b!Kd5@|8V8FUyLZi0n>3T`K3m$XalHIt<)JqFkByFc8(z9n#vvK@lnluc+^x>D z*?tE;PsvS=8tSuWz~fbOTUz9KI&pKZMSX|uFQS=r(0kv2Rdv@-DqU~ku{o#T-|b=l zDRjYP+p~@0wyu7;=0=IDE;s!P`X z9-|s2wkthwo>wjJnv?GRoMF9m_puGRhh2U>cHHsWt)mys%Q7rr`ioAxF1HyzcVq1( z#gCgDADv@R)A`@VbhVvOB3DulSJwt*nw7MStadlJ%=%MD)-36pyUCRMx4K`h`{c0G zyL>0-+hlP+a3W}hX$$L{8^Z=UeM|k=dd%M3wT7-~c>6=|O%C;(hxq5|zqMMAZ4El- z3cOS^Bzr;Ej$bEKIGG`&MPiX7Nu8b~xlg*j_1r{<)7Q0SPR8eS*=0pm0pzc(oxSbB z7K7$lko+8|$O!++LXP~A0`r%WlA<-3QV9+uz{NZVlSv&UBWM`FNyT>LgJnZ7jrhql zJ=BG+WAZdUombKs+700-^)pz<%1O$}YIZkZ50nw14sQefb?_N`{Z{+$@K)&ikNDzG z>iaviSbYtZe}|W`eOVd)R6n4rMq903A8&p1bowdJZ)u4w)v>0-OJN>m^-EQjM`zrz z{fyzo;`}a+-|f%J17)D8kFHvqT0QbG)Q))nD5QJ~@~|?o48{k}hR=DalP z|BLaOW*vUfR-g~^0?6eVnXz4;?udg>AFfK9-q`2*FmkETG1h1Q_Sn`d7upRW=tk#% zQ&%0{K>JV7H-ZM|S0y~9Iz~_3m>@Br(oYwc=*ZABSY~On<+RdM!dIiIRxhLRi(|kb z?O&uTlTDAkkv~F%{UmwNuP|OA5BH_)*hL)Czee-XBXb&L6!{wLlSY33iXI9w>E$c= z<{#6WvLo+v1sN$EH90V5)Y?_+L4P_SFRa7zuRz+iblwPBI&p49A5|UyTl%Ka2FB{6 zWGkgQz4TI(pUVHI_|p3PH^@NC^9S-#amm_~uadND?j&hl zuO(^cUQg2Qf0nF${#9X(4*Kh}fvnB=*wY)+zpJB6COz}Ah)3xy*6HbIjC_BJ4)Owe zX`-p?3-ZAm?clSC+HyNI+7ccbZAp> z=U4hD$fo26y>)5iEB;mpys=!$pA}MRu}-5kB{A`L zfJXc7`yYx|B|FJvru;|a8iPaQV_vDBQXS&|OHdZKX_ZG6U-A*qzx*yq+wvg#8=$hk 
z#Tal`Yu9Kkc51Yn?j~yE5*h#K`yB@4kuvk^{$y>$k7Vt*KxRId`97W-_b|@s1aW|4AE6i<(4WMI-64(kS_E@VVC{|a zB4pdyvLQS}?JT`NQr3;xKjKqX7q$$tE7h^}3OFz}-N+cNyqh2E{CI~r&>h#b-_`P^ zJoq?a`vd;qGaBvFFS<5Tj9(ppsPt3NNu8bw4IN(0d}W{=eEdiJY=DotUW~kG6M&c} zwCQU<$a~^tlD6S~;&-xc!*w0&H!O?(CgWMm3uC{uQk~F-{SlgZC1@AlNYsAUfG_?T zf9SOnWeoxYGTfemurMhWyO9==%LRxsEa-vy={Lm-i(yR__+9kve7mW{hp%z znvl$mJ##V!`7y6M$D@B(f2-BAcu-cxmB#?!KvB0!3uBAF#`A0XlB6v8+Zcbb{xekmHJZFk{M=)EFoEntB>uwsP_RAPf%chF z9o;U=OFoqRsyp`SdDmDxy3ZfWQ@Z^m_j4P`{ah7tKPTIp72-hhJ@`?-ucUcf#pkf@ zm#zJIdcec*93aO*xe%HXd0`Ln1zD>~_X#8qx>EY;+EDVH)K9ll$n;&=h6V=WBg!=zjs{HQ8tXJ0Ova zIev~u`zb+n-^jldnJ2;J!$*V_LX0|<>OI?Cya5a4*bgdmW~rTFX8H^E8-bgN1$%peJfcT zjJcSoeNTR~kgf%Czv;#SjCa6M#*_RkZ-;tpLugDa7{pcbH`HN0LWl<{fA>?ew##ww zJFe?>$2{6f?stA|E!ta{f9bi~kpB>uhf)l%G%+@G>!nyg8TH}NZHUi7;U314tZ~!PeCamG(`C-r3Ovak1?-KCyQtBYW!CXByHH~t z>S_D&dH8LfuVfj=}U z+O;IcEp|zELVK|gZHDz&HU_fsfMx8SR*Rh8+?v_r|d1@g0Iov2oYaiMmokq!V7(>PcFa_67V|-zt8RCf16WV->KO` zW^*08O5;+=zI2^h9({pO#3SR`6p{{4DhB>gTPYGtz#nY@uq=5-aoSal_JdJ-$q)nj ze#4XI1-;{5pHHz(&HsYGQ(^uQaKgxMcs+98I2Meh2r9dVPcK z2o3vDKRu>D_#FK%$N|64bvh2u(FFo zzG;hX-uFs<)a+M@?31k~&o%S+i;Qm+;zDUo0DtrKOpGvoRg4P(2j==WWY5-#>|wyO z(xUH)P1R(_dax`1j2&%PIxpE&*=NeQlE0+>eXJ1z?4Kz=dnEbwx94~&rttS!c;0gL?IbP4BEC;S%cqtHy`(l{eaPF2XD+k&`-jBzFHsphNJe2OI+8xzsugdn44b2HsY{>T& z4D}<|c(P}EC_Gu)0G_uL#=KlE-oLcWaNVNIIPL+;br#CvI*V>s%F}g}T@|jQay}HU zU|*x!xwuf_nHD+}F8bFHf3z`=C#?%{?3E_(Ho@B9Sq$x4!~T@b?*``n;P*+|jwIIb z_jt+wC=Wfh<#~{s(}CEK)*15%b;mXON}t{JwPAmCDDl?C!8sdncs!n$RF?Xv(NU{M zpBrjN+j00@Z1XG#@T-(d2UL~S&A$5rNAY7SSf2Yj{ST|DK;E9!O7Feklg!^A`4}W#{x_VjA&ii2BoY z$cJvjJVL~WJtC-svJ&L+rE5YBau0T%yuYaIYvbny*thL*G7aM!$2lu2H+M?1^LnuU zgy$)xgu3!|=ri3uXd@-a+7WSR8zK(Yu{co1Hu${?@i{!Ij5yezhH7m}^++EfO-Ct@ zc>KJKV-tR3(P(|1{9X)9B+uT_I*alO{-ou>Hk5~MgS9i(Bb2q%Hl|_-bOJo2jbn!H zi}YxS?O2CpgjkOd>v<@}JJur}*aBpLGPVI^%XFJ^o{!_S_tWJo%0_I(T@wTd*JrpM zz;gzA=K=1z&(8fR+B2_{wXhc}(bv_Jm7gthx>Kl`hSrtMLtnN|Um1A;4xOjVSkFU#P2dw|U=Co`hmqvDYgq5`u_W>0@7W~ZOR{2} 
z;6tzjNz!lVRD9^Mk#zm1c~W_h34M(aV@NCuaSLl>%1)4%?pX*OA_v=!`fz%S?Ldd7 zCFQyOZNx(;%)dwr$35^8>x4Y=$GC@fsZNfM{dp+8d!k5_(j9ajbXJq5@qZ;ES;sys zAHFR3o#-yLQ!xN~LYsI`Pvfz9z8~rju=4H%ZLd=rt>Za94JLXFCV7CM6IbT)iDDV* zF(KX{a_-9@@~+^3Gl^UpNZ>$HH;~lviEpR-!F#@NUJRcko$QcB@ z|M{2!8=-!Zy}%BF9l>5q;{DC{W81Jy)1>kFde|#W(8BvuFvlCSA9&&w@joRin@7HS zA?J6Z4;LHsnn_Jpz8-kNb0Rw)CXwrw!o7;o*D>8SjikT4P1hzuJMd!p(PcW%%%Q*w zYz|`y-cL@84FQ+6tyCvpYam@YzMNlW?*y%ec}G=OXUW#+e|p^x^ic*Ls`*h@cDZcO zuG|;s!n0wppUJI96zwbb9fzl2dwu$;_0e0eD2rq>eSP1O-$~$}9DRjQ%?>hq$>vsN z+e-ALcB}(vN#@anH25EW$CY1;h5K?ckJEIym>}}uwNxfI9m7WGfV5n%l6R&I*joO4 zI>rM+#wP}7$^Aytpz{n(S-cs6M`VC~b67`kHd3&oqV0^aEp2Be&#zm^88&1-h4YZ) zx>9AvMm}N5WUa)HcZ9UxA&>qN>(zYB)MHmpLCR`OYwXX-pZ$ZO{FF!q1nYZ3N$mWoir?jsdmU;OlYxS28sI z8U6Pt`d@gq1Lg(J-y|Lc*?B1WP)Qekbr?%WYx)=}r5e$iUHJA;tm9 zD3e_&7L@8Rex72htJe0HPbN9mKhW#)mw6y`bA62E?)3Z7#7CU+W>*%f+)2Z6szWU!v}hw}Ni(ml1@ZVL9Ie9f(sg!Lk8Pqi}ZE6)Gq{UR~uQ8pDQ8~;Kaur%4S zTqp3WTra>^l9#6=+#3RavUH%2LLKS* zja<`g$@zh3GaS!4HUYmY?v=@H$=VU=Abl3X8K|=JNCcfw7Y?G11j6`--}-UwC#MH< zwKQL&-%$ouHY_6zEVB^XVV-M3-v5WQ9M$Im!9P0jtEUg@NB2>xquU`5I20b*jQ2Im z56V;>=?8jrC-3{hb(qtc^RYBXn@rm1B=jYFRhP&f-&K-euMg8H^4`y!z2hHjL)((%y`M}0AdMZK>Te+M>gJ1C<_gZwue5NIDWXf zLWl!d4A3zl$RhU9FB@++^7|i&Z^{1#>_Y4T_kU<}4ibK}u5=r156IJaP^RO6ZIky^ zj{%w=#6TIccHc|xQN(#b$-iQq9(}2K;3muiWSznNrlxa!*KcRJ9!j{pZi@P|zT|u_ z!~qvKbRKET*ZWHRC7a>;&xkQ0*i6^|lKiLxLM7gO9p8t`vpOooi8>wBnTN0*g!97S z`F3~mjF93ur~GOtM(8>4#s{({gcu;S`RBGI-pcw?I&!{}@RjsoX>euQHf&xfv;CB4 zi`QY!w)}n*XDd>M@t?q*A)zo96l|sAf5BfM2PQ=sNITXmpSK{jBOd*`245%Gl(q%Z zC3QT1UO!Xfr!DuGZ?Y)pV<-mbc8GzhW*t-w2_2E z;~SCZ^hTQyd?>St(O--YwqBvk%PaYxzt%C=0PsEfUw{VvujDVfj?NRgAT}|{p1=!JG8Q1SYS?Q}oY zytD@3=6`aopmcU96$7;Y1smZ052q{PDME(7?Apz=flcH-|3h|S^yU45v^CuK0@54< zwC|bxZSKB+(C6lUoCE#|{}Y;ELz1|;T*z}}E-#%;X-vGKU0@B!U;p{_Kb{e(Z;Xik zCwwJ3Xk&o9{+$1Duac4%+c`ZLe;FhDui}8L(0A1NL}9&~#<_s;J=zH70V}eHLeKjs zBl5&@+923bh$Ui2Zp?E~r~`cj-*eEQEv5THlneZCCGP(#KLaA(0||2>=YPTXoIW6f zz#Hj8nfJdSBkF-Ni?Y(XQ2KK1LYbnEd^q|d4T-;@dXNv&&~xw&7UTSH%$R@}5YI0a 
z<^o#QU&_a>5TV(9DU%8a)?myslbQd ztHwA0d~9d^O|`MK#Il^9vc3X+Ss!W77W=^aUOL}xO~bw*j(>#L$!}2HJ|T44i4eLzDMC#DL^~4H*mQa~SWKP&XQ)|9O7Owgw;L^@ z2=by%YI4frN-0iQJ5h1~ZH_Jrp%26XUZ)eZJ<0hMU*e6M2WT6p<>9^no~MHKAipMH z<>c$>7yy6bSmq(hE%fDVCB!e-g(Som=?L{`bFp5*zM}uhJ{jzn;{8ART%7M!u6O+P zpIl1_PnmC_O!#qefqc;y2%(;nA9WNU$j|j5WylLUOL_nv_CZ`@A>EhHqnz-&Q3vua z`6$BB}?Wp-2YKR^CqzJmAvaQrLkN!f$H|A%XbKvVQFFRv*p3&_gp z0Z{M-%FXFPx05{90eylm#u@S>P~=V3Np_R#DzC@+U%Cch48Z#rX+IlFcisU1LxS}H ze_z1M%gqN0@=Ec_>keg^4aGi`ZP3Qh#@UpDYIPzV-li}Iz_&8t+d91e>2)79_o43v z2=D*kxqX~%8UKSkybgj)=xZ4yZ5YFxybuGF4q_g32K(`l=SSsfov6Ir=fI0j5>1?+ zx#xVyT+rr-M*Ha}e=n>!4wS9~bRF0TQ%Wi~55W6X$X`8fK_*PR{dir`CZrAQ2RLXO z4hm(^PpF6SBD*e&xAm+UIr|B&S+oq`S57XpFIC2I&*2F5z+dc7>gYTpyPB;~7uj6I^_6%a4zDlNW8(Ga z>V@Zd;hEl&TuRW^BWE)3-({5iDDHz(wh-&+_rf3s7@P~_{T}ssK#~dN1v$A`5N$`r z5AA0*&-p-z57DMnU!k4T8`l3M!Tmq~4FNjlF;5`Iw4j&he?m+0ji3YJ$<6(o{G^VP z0op(Y35WLwEf?Zo9beYP!*)5udpfiqFBeTy;LXXv_vQMK@s2$BYXOM?A}h=Twe}|H z#sKYC%%gpv%=(_f2_VlWRv_OcM;RrXuyuTywPC?@SC+y7`jxA7IR(-Q@ZZ|3eHw0=`_ry-P|SK_=OCUBt(JlDsSrr84R! zwNW<0*dvhVNg#}Sq0H$b%met$FTk242H;yI+<0bvYp9IAr{HPw{ZjberzE4KD_bY? 
zzhEyq7I=Ln9YtGma|UH=?z$`S;dCYbf%%tP|H1q(tpE9Wi`N(TgJg3V(FM-`5a(P% zLzD^fL{8fOum<4bfNN74gOXgl{-_T^w%yn=-(S~85L1Hh1%LCtC-pEFaPz-(9e^09 zO};_+D4H4<#P70qIMKgsJSf!@92h@lX_yB%`_f}WurnJkN@Y$rx-ZFdHX$^W{IBo6 zAIAWWf1aPP4gi@@E&_4v^D(UJcU@Tt4)!Ph5!X4gynf&FzUFcGJRbwdiy!NtKQs2Z zH~`=2pSvcIHf}C}Zxjg$&e5*P!~km-S();+7#~a>+((xq^m)0# zKD3@x9&E+gh1Ah=J7+_{6Y2%Ka5h8xB7`<7&)I{N#r2=U{*R*ni4Ob7`k%88WmDal zqvYr0!ZOGTx}rQ-7TU3m>m%T?d4ki_FM)g)Gm(2g6W#}h1n+BLx=-@=NCJp?DBmIP zcHJgtNQ9CE@A$wT4eZwj3F+#4{vFNB5D*7Dt5p;#~pY-}4 zc~E}i$0{QyX_v+@X;bK@8&?V3d$aueYScBEmQi2ckgwF*1>I@i>ExlWQh&M*^9TuD zug6K09Rz>Twtzg2e~b&dow767ON}n7yZ=}1REBFdoKpg4W$4Jo+1ALpKxv-_eZ%?_%f><_I#5UYbKk-vd2Zc^ zcmjmFSKm^!Wo5QR-3cWA;-3A%cJLpa=z7c>fc)IgkAHZ-jsN|hmlemo?EX*C19b7_ zuK&XCKe$%nu3;#%et+v+3xw;LJYIknCY~Og*UR}H?I-$$8ml;Fp^cJRCuXoO(n4IS zEUxhpSAdH3bbkh4M-G997!yL@mT(@hn%}rFE4POX7vq8MPwLqD8Ou0!;Wr>SN7>-{ zpQsTE^2zz~IL6HrEbdTpwzp_|v@wo%u?)7Q{Ve2R-WUA= zb;L0>j;c*v8qv;7bs>Pa5w0KjWS&VbUfp_*Um_&^-2Lui!$ zVf>T%pSu4CxiAhy{ZVcLkGvrM*R%gC*C8#whFHNl4xzNptg<_veAkk)xsGiGyQ}$< z(t(Q+(qFtDV;j9bfI7iXA{6liIp}uGBMt|txPx}YArN^(47?}bR4~(-M~Mv#`4W8( zzUFXA4C(2^k9$7Ocpviq$KU^Dz9;f4UF!}AXB7i2rG(>_TeEQTfga#bl#$;Pmgak* zO%|{6I^^Sa7uNsiXDSw0o6}{==G^&>dVOsC*yYAP(5C4FU%HN#Luf<410Ifl_)aRU z$;5qinJu7RPtxOWx*zykUgrJB-T#yE&)b!?En7~}it|5r{TJ4OjC`VRDcPV-{<{#P z6NO8!{Xu5*KkS$7C-{6gcSO=0~9>wn&+k{!is03*LY`Sue$ zH%eavkQPFY59d$$IHvh4=aunkx}e+NWGw)DbFaTAk#pc6C2QZ2bM)aXeN6EEVaNk~ zO~F^+lC-Z#KdzlT=K+vS91dxN=RjUF3F_#2mIuuraS^^0`o8>{tUXE2fP*!SWCuFW zj19q$=vT0ZWMerl7bDnCLEeV6-}tf1t^eu$AL4KDz3l#viUHVTEy=C_`2Oe{8cI5W zY~WMLuR6a8P%`rUBwV_mj3zx-2zk_-w~<6cS8vH)(57ZbZaxOuaP8tCh}zOS*m>a6K74k&x**Z^Wchyz{+i~-5_AlHm*(tQ;h1L$)R z3fH*>sg37qu~^5?<+}Fc^fJ;8C>KJc&Cw8SBh^W~p^jgZ%j@Vd zt3(57{RxQlSz2oKN_c=POVpSheLCT6olfu` z!orY5gWu87=YJggTr9x4fh71AyvM^tZ4L6gEUr0eo9gCO^ihg?gSG*CV_YfGRjT_1 zA1TT9UsuQ50p~XKJ$yG~;w25Ymw{v0C@8E8RdJbi_$KGNyGhz=-FEi9XC9`w9ATe;x!E+xrUOp^jNaSdxp=jb89;Knyx;_EBPG8K0W^N z8o=SErgC#ZDuEw=rf4UV-)qYY&$xn4l3w)MPLN&ncUo z#Q17EUZ?q5j 
zJdRb~e?lFOVVoNXO#e%_K)MCeEs$=3bPJ?gAl(A#7D%^1x&_iLkZyr=3#3~h-2&+r zNVh<`1=1~$Zh>?Qq+1}}0_he=w?MiD(k+l~fpiO`TOi#6=@v-0K)MC~?^+;3`d_*Q z(k+l~fpiO`TOi#6|8o|wAR0F#X%0ylGn6I&lfP!%|FZws_XpC;^fUJ9`Sf`B-;M$0 z^CbSaZSp6^L}YBTHjsR4@;Z43TXr@g{SMFnhj}>1zxQc!W*#}i1kN#mRB@+93+ID> ziP!T!)BkZ>fNrkW61Ao~c%AY6UK(2O(P;OQZ%ZU`XZrpBF~&d_(5bo)o?{`s*UrA% zhqi$8bZ?Py@h8UFzfJ?+!6NYr->GOvzDvoUxu>qTk{wLR`-2n7dv|bN&%dt2Z;mzi z4yQAb9rQw<0%Y|xRE9qA9o9|cz4UnToWnmmF8zO1Y=GAmbYdvj8$iw%i7T@^KUN66s^CA|J^GM))3e=aD(~yqv&b02k;l|Ao$Pcb5 z{PptRnXxuL~R5)Pd3fE8MxkkAZvA}K;1F^ujoa_iLrdZAC<^Aq1WFb*Gu}m zbp!e-+Kc!bXlx|cUii*D`dSY0ECb_ZG**{i!J}nsNWKFM=k&7IHnyy&JF(ZPSIJtS zLDQ7y;mihodjC7NVP%lZsL^&gk*K{xzL}<`CtLrNTwA-J0KN6;&d+_n;v=>XD?hel zJwmK!A-1y+@KA@+iQjVcs#l9u`fcTfqm$* zQXc9IC7L#k%k$v<+5CV;8>8nQyED1(qGh7#=+8^*RP2us`e8mTknN|RzuZ2+2m2%B zV?m>xsq!5%IQNK_PnVzEr}B1wE`aX=HX>`_ro_&WnkXgG-k3?SZiWPFd>Uds!n)); zmZ*=Mzg&N%Hq@==UgdK`!^pTt9JHSf9r3IuU4F1GfOFj0`Q%TRPm_5W*PJ@?DU1_Z z7s$ifIZ5Gq2;Z8d^{4ZO_`q4SM$fT-;J#%#*+&AIXx$9S$m45>?KIj%h5e;_WM7W^ zembEGP&}hskI!kIzkPq62D`wwH#y!$%7$`+z0~x-OX;5$-}81&(B4hMHC8qzP;Z0^ zu_?nR?Q^g5{ztuJP#lenZyWu7qk=o{X9lq zgZxlWPJi9_r~89kX5`FkwfLv?hdkXkt$A2)!n)0vH4)eV;=s7`cNOF^+6LtM|1aqe z_5vH$+o#ccUNy!88wbYQUx;}l{0}ne=r7DuF#j9le|C?~P+S1bI%LiLRCbQ5^4tJ6 zfZx7Q4z!^m9^9BWLVuuZ&iNn4rF?AC`iuOKCg~^6E09lP0>0JBxx(=MA@sKz#5jOH z#_I{M?al%t0+26_NLEbEhvZR7Qa@2*rL-)#?iZ@>m*UIH6*AU4p|pSLG3 z3-A%jwC<9OBYWF{){)i9#fiCLG@jzazZpM^BMqwRFY*3!8;RD|2`OQL@m%qfP zaKAa|+Cbome&YN>>QHZlsE@!~O@Fxmmwm&L^ylY#UT&o^h5E3v>BcPYAN)oI#6U@s z%8|ABnb&%3AlwT^leJ{)!%A@==!?2jx^glxHsJiPa{cG^m)j5Ym(G0_?7;Fcvdrl& zoO8_UA1~Sf_H-2I0U-vU?E~4LvlYLys~`*LYCslRe|G*8^*2C6ACHy?6SR{qCu*l& z<WF$FOp858A2_3Z&29Yds9*<-eHJpYjQT4*UxxbY>T1~5 zc4S;C>dNUxjcw2w^+pIdbRO-&)*sgAUR{^eo*6L`=u@F-eZ z{~Kom@ICK;*pq>8jjG$gNFDX*F4zh5A4Z-xQI%0ecfjZUuYCXenB1Sx--b7C9PqIL zxbPjJ>*N`PuL;RIDTdU+?@j=*U95w8Isq<+$FF~|4W6HbZ^gnqqpCZRL#+*DgaqHU zP>TQWWUa-24~xf>>ra380(syZfMsQfd})X>VjY$NVjT-<9Ha9jF6A`IZ%^bmZ94A7 zm?d#FhU_lmrckw2gO9@5I!$K?l)~x&;GUzW&Hh`hFFjII&Bm!Xj8+( 
zdI=^F+yw2M;6!c@9&}S&UkKy>|Lz<9>5`{gAl(A#7D%^1x&_iLkZyrrvw-X{fb_!u zoCV;Vi0UK_B1t?aqC6=uf2#Kn3ir|JxO(ol((R>wFZw@b8`#_2d_PI+NcM9_Urf>- z3{R5n&-}NJBiKLmdzM7L8Ls>M9y|wr(}>^q|5Liba~|DKN_%{;FGlaxKpEe?e*X`= zOZKaJePoF=oL?mnBw=I<0kevG3fd=fOJ%e|b{~>vi?i=zR2D}Riar`G_N4!Std_oPW);kQS%KCqT*uuoy!DcBI~-%G*%to(qdqyyB$vl77ut}P(b&qO%GESXzp z!der~GlLW-*1>Zh{COcVe(>z30)N1Pq%6M_|1w%CbYM<~bJo;s3h({4AN>mE^7v^PQ(-n|s!2j0I${X}{S{pq#}e9Dpe_z<~vsQCfT(Sh?8XuS>QL1qQHp&!VoZ2w=AA83{(=Lo=> z@sH_%@~h~eB!8`6v_IMso>hbOhdvz|>^IUzM#(Q9dl3I93kQ*gfiXb$f%QI|OQQH3 z6B!Q>OQ-|gPnJi%2pRqmKfTHLWqEM&Lm$M){&XF-8Gx`>Wa*<{UJ@N}OpCnK=%EZ$ zJH|gtAMShjXXMG;%k={&Bz)eVqNB{4f3^nRS6fEz*H|8s4k=nZvp}LF%=w(Xkhd_7 zDH|%vPp*%!*5YW<{l#|^B)>p^i6@%}8Hf%Rpo@)eOz`JLh-}Tx=ZX z_^pNZSLlO%5q2f-U>UlXW91af^jc2H-zH}Sz}Xc@UmsMHU%&n3wBc+tQ63TpI(d+b zmQx7_>jd03hTcOF>lVnlqao_$Pe$F+mL-oq%T4ky9)6D z^0WLKkY~|AH~sSqPY-;(GBi2dHw3W{>_aTSscZiiWIl%d3BWT%%TW8DpuzG$ z`J0pH;T6v_CUF=+-eH0Lu5w1s1uTz$stmHjJrC@`!h5)E>|+_9sl;bP%aCgje;qSE zj{d9;@!Weo)5z~5YG)3pJspvnswM9gq-tM&`_HD2aYT^xzjOv7C=d{X_)QlwYIjsVu!2IDgIvuL7U({^#ft*_p=MAW}|5NpFZ|fj@6T_73F^(f= zmBRNy{waP)7uI<5$Zz?u4rSNEmgL=`P2|}TIMW{S{}hBhg?&%S^FzWnA@r>U;Y`eP z`p$_+<9#69gTt8`xK7u{n{IV_Lto*_%yOQT3piWJOU-^#220c0fpH<{c0^|>8UEpuLO7eb7L;NC5_#IM-7SwV4$zD3* z0)I9Ap&rJe(q8Em5=*cLNvBifcekVD*&aBHNZ)TwtBmB2I>5W7bj*P4H_6#h@Eat^ zh(kFS2KNWxQT$I=ce%qmRRnGIjg|z0*t(40E&z8ZpL+ppT7|UOQH=N%J z&k4i)4XN#s1TFBkBYVqmE)M;UI@kf8!{p9P=>v7I{sw(V;=#+o-FJk(snkO zCwq3jWPTNGz|aKyuU4}^gA4vl^YgfbKRm;r#vkfeQ}I9dy8i1AoG}RBk6EOY$euf0 zzmVX}Cw02N<4G|A*Fg9^7VQY<{K@>q*g&5iX>9}BS0UF=c;*%P1BCAwz;pG6)>LWb zN8`hDi*T)>pA%0d8;W$g`7uT7@i3WBj|hZ>Wg6;hQ^F(o&tl-+eL27X!mWdJk^V&s zFxvtDdJ*<1U|XUrN#Z_57E)mT=&|}I^RV{cNbXxb9;W@?^(gs`jh^S=+zDguJs*2l~VJ?&$t-Jyo;c$n$dhf&4(9rfWsk1@OE9T$|Yh&tW1jc>f3X nAz + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/e2e/assets/wrong_ids/src/frontend/assets/main.css b/e2e/assets/wrong_ids/src/frontend/assets/main.css new file mode 100644 index 0000000000..1f2e7508ee --- /dev/null +++ 
b/e2e/assets/wrong_ids/src/frontend/assets/main.css @@ -0,0 +1,20 @@ +body { + font-family: sans-serif; +} + +a.active{color:red;} + +input[type=text], input[type=url], textarea { + width: 100%; +} + +Button.thumbs, Button.thumbs:hover { + background: none; + border: none; + padding: 0; + margin: 0; +} + +Button.thumbs.active { + background: green; +} \ No newline at end of file diff --git a/e2e/assets/wrong_ids/src/frontend/assets/nfid-logo.svg b/e2e/assets/wrong_ids/src/frontend/assets/nfid-logo.svg new file mode 100644 index 0000000000..b915816bc0 --- /dev/null +++ b/e2e/assets/wrong_ids/src/frontend/assets/nfid-logo.svg @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + diff --git a/e2e/assets/wrong_ids/src/frontend/src/.ic-assets.json5 b/e2e/assets/wrong_ids/src/frontend/src/.ic-assets.json5 new file mode 100644 index 0000000000..4515979a2c --- /dev/null +++ b/e2e/assets/wrong_ids/src/frontend/src/.ic-assets.json5 @@ -0,0 +1,56 @@ +[ + { + "match": "**/*", + "headers": { + // Security: The Content Security Policy (CSP) given below aims at working with many apps rather than providing maximal security. + // We recommend tightening the CSP for your specific application. Some recommendations are as follows: + // - Use the CSP Evaluator (https://csp-evaluator.withgoogle.com/) to validate the CSP you define. + // - Follow the “Strict CSP” recommendations (https://csp.withgoogle.com/docs/strict-csp.html). However, note that in the context of the IC, + // nonces cannot be used because the response bodies must be static to work well with HTTP asset certification. + // Thus, we recommend to include script hashes (in combination with strict-dynamic) in the CSP as described + // in https://csp.withgoogle.com/docs/faq.html in section “What if my site is static and I can't add nonces to scripts?”. + // See for example the II CSP (https://github.com/dfinity/internet-identity/blob/main/src/internet_identity/src/http.rs). 
+ // - It is recommended to tighten the connect-src directive. With the current CSP configuration the browser can + // make requests to https://*.icp0.io, hence being able to call any canister via https://icp0.io/api/v2/canister/{canister-ID}. + // This could potentially be used in combination with another vulnerability (e.g. XSS) to exfiltrate private data. + // The developer can configure this policy to only allow requests to their specific canisters, + // e.g: connect-src 'self' https://icp-api.io/api/v2/canister/{my-canister-ID}, where {my-canister-ID} has the following format: aaaaa-aaaaa-aaaaa-aaaaa-aaa + // - It is recommended to configure style-src, style-src-elem and font-src directives with the resources your canister is going to use + // instead of using the wild card (*) option. Normally this will include 'self' but also other third party styles or fonts resources (e.g: https://fonts.googleapis.com or other CDNs) + + // Notes about the CSP below: + // - script-src 'unsafe-eval' is currently required because agent-js uses a WebAssembly module for the validation of bls signatures. + // There is currently no other way to allow execution of WebAssembly modules with CSP. + // See: https://github.com/WebAssembly/content-security-policy/blob/main/proposals/CSP.md. + // - We added img-src data: because data: images are used often. + // - frame-ancestors: none mitigates clickjacking attacks. See https://owasp.org/www-community/attacks/Clickjacking. 
+ "Content-Security-Policy": "default-src 'self' http://localhost:8000 http://*.localhost:8000;frame-src 'self' https://nfid.one;script-src 'self' 'unsafe-eval';connect-src 'self' https://icp0.io https://*.icp0.io http://localhost:8000 http://*.localhost:8000;img-src 'self' data:;style-src * 'unsafe-inline';style-src-elem * 'unsafe-inline';font-src *;object-src 'none';base-uri 'self';frame-ancestors 'none';form-action 'self';upgrade-insecure-requests;", + + // Security: The permissions policy disables all features for security reasons. If your site needs such permissions, activate them. + // To configure permissions go here https://www.permissionspolicy.com/ + "Permissions-Policy": "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), geolocation=(), gyroscope=(), keyboard-map=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=('self'), screen-wake-lock=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=(), clipboard-read=(), clipboard-write=(), gamepad=(), speaker-selection=(), conversion-measurement=(), focus-without-user-activation=(), hid=(), idle-detection=(), interest-cohort=(), serial=(), sync-script=(), trust-token-redemption=(), window-placement=(), vertical-scroll=()", + + // Security: Mitigates clickjacking attacks. + // See: https://owasp.org/www-community/attacks/Clickjacking. + "X-Frame-Options": "DENY", + + // Security: Avoids forwarding referrer information to other origins. + // See: https://owasp.org/www-project-secure-headers/#referrer-policy. + "Referrer-Policy": "same-origin", + + // Security: Tells the user’s browser that it must always use HTTPS with your site. 
+ // See: https://owasp.org/www-project-secure-headers/#http-strict-transport-security + "Strict-Transport-Security": "max-age=31536000; includeSubDomains", + + // Security: Prevents the browser from interpreting files as a different MIME type to what is specified in the Content-Type header. + // See: https://owasp.org/www-project-secure-headers/#x-content-type-options + "X-Content-Type-Options": "nosniff", + + // Security: Enables browser features to mitigate some of the XSS attacks. Note that it has to be in mode=block. + // See: https://owasp.org/www-community/attacks/xss/ + "X-XSS-Protection": "1; mode=block" + }, + // redirect all requests from .raw.icp0.io to .icp0.io (this redirection is the default) + "allow_raw_access": false + }, +] diff --git a/e2e/assets/wrong_ids/src/frontend/src/DataDispatcher.ts b/e2e/assets/wrong_ids/src/frontend/src/DataDispatcher.ts new file mode 100644 index 0000000000..45f4f0d136 --- /dev/null +++ b/e2e/assets/wrong_ids/src/frontend/src/DataDispatcher.ts @@ -0,0 +1,4 @@ +import { ItemDB } from "./data/Data"; + +// export const AppData = process.env.REACT_APP_USE_MOCK_DATA === "1" ? MockData : ItemDB; +export const AppData = ItemDB; // TODO: Remove this indirection. 
\ No newline at end of file diff --git a/e2e/assets/wrong_ids/src/frontend/src/component/AllItems.tsx b/e2e/assets/wrong_ids/src/frontend/src/component/AllItems.tsx new file mode 100644 index 0000000000..5001a99a3e --- /dev/null +++ b/e2e/assets/wrong_ids/src/frontend/src/component/AllItems.tsx @@ -0,0 +1,81 @@ +import * as React from "react"; +import { idlFactory as canDBPartitionIdl } from "../../../declarations/CanDBPartition"; +import { _SERVICE as CanDBPartition } from "../../../declarations/CanDBPartition/CanDBPartition.did"; +import { idlFactory as nacDBPartitionIdl } from "../../../declarations/NacDBPartition"; +// import { NacDBPartition } from "../../../declarations/NacDBPartition/NacDBPartition.did"; +import { idlFactory as nacDBIndexIdl } from "../../../declarations/NacDBIndex"; +import { _SERVICE as NacDBIndex } from "../../../declarations/NacDBIndex/NacDBIndex.did"; +import { Actor, Agent } from "@dfinity/agent"; +import { ItemRef, serializeItemRef } from "../data/Data"; +import { ItemTransfer } from "../../../declarations/CanDBPartition/CanDBPartition.did"; +import { Principal } from "@dfinity/principal"; +import { useState } from "react"; +import { Helmet } from "react-helmet"; +import ItemType from "./misc/ItemType"; +import Nav from "react-bootstrap/esm/Nav"; + +export function AllItems(props: {defaultAgent: Agent | undefined}) { + const [items, setItems] = useState<{order: string, id: ItemRef, item: ItemTransfer}[] | undefined>(undefined); + getItems().then(items => setItems(items)); + return <> + + Latest Added Items - Zon + + +

Latest Added Items - Zon

+ {items === undefined ?

Loading...

: +
} + {/* TODO: Load More button */} + ; +} + +// TODO: duplicate code +async function aList(opts?: {lowerBound?: string, limit?: number}) + : Promise<{order: string, id: ItemRef, item: ItemTransfer}[]> +{ + const nacDBIndex: NacDBIndex = Actor.createActor(nacDBIndexIdl, {canisterId: process.env.CANISTER_ID_NACDBINDEX!, agent: this.agent }); + const order = await nacDBIndex.getAllItemsStream(); + + const {lowerBound, limit} = opts !== undefined ? opts : {lowerBound: "", limit: 500}; + // const client: NacDBPartition = Actor.createActor(nacDBPartitionIdl, {canisterId: outerCanister, agent: this.agent }); + // const {canister: innerPart, key: innerKey} = (await client.getInner({outerKey}) as any)[0]; // TODO: error handling + const client2 = Actor.createActor(nacDBPartitionIdl, {canisterId: Principal.from(order.order[0]).toText(), agent: this.agent }); + const items = ((await client2.scanLimitOuter({outerKey: order.order[1], lowerBound, upperBound: "x", dir: {fwd: null}, limit: BigInt(limit)})) as any).results as + [[string, {text: string}]] | []; + const items1aa = items.length === 0 ? [] : items.map(x => ({key: x[0], text: x[1].text})); + const items1a: {order: string, principal: string, id: number}[] = items1aa.map(x => { + const m = x.text.match(/^([0-9]*)@(.*)$/); + return {order: x.key, principal: m![2], id: Number(m![1])}; + }); + const items2 = items1a.map(({order, principal, id}) => { return {canister: Principal.from(principal), id, order} }); + const items3 = items2.map(id => (async () => { + const part: CanDBPartition = Actor.createActor(canDBPartitionIdl, {canisterId: id.canister, agent: this.agent }); + return {order: id.order, id, item: await part.getItem(BigInt(id.id))}; + })()); + const items4 = await Promise.all(items3); + return items4.map(({order, id, item}) => ({ + order, + id, + item: item[0]!, + })); +} + +function _unwrap(v: T[]): T | undefined { + // TODO: simplify for greater performance + return v === undefined || v.length === 0 ? 
undefined : v[0]; +} + +async function getItems(opts?: {lowerBound?: string, limit?: number}): Promise<{order: string, id: ItemRef, item: ItemTransfer}[]> { + const {lowerBound, limit} = opts !== undefined ? opts : {lowerBound: "", limit: 5}; + if (this.agent === undefined) { + return undefined; + } + return await this.aList({lowerBound, limit}) +} diff --git a/e2e/assets/wrong_ids/src/frontend/src/component/App.tsx b/e2e/assets/wrong_ids/src/frontend/src/component/App.tsx new file mode 100644 index 0000000000..6161f7ed86 --- /dev/null +++ b/e2e/assets/wrong_ids/src/frontend/src/component/App.tsx @@ -0,0 +1,207 @@ +import * as React from "react"; +import 'bootstrap/dist/css/bootstrap.min.css'; +import { createContext, useContext, useEffect, useState } from "react"; +import { Button, Container, Nav, Navbar } from 'react-bootstrap'; +import ShowItem from "./ShowItem"; +import { + BrowserRouter as Router, + Route, + Routes, + NavLink, + useNavigate, + HashRouter, + useParams, +} from "react-router-dom"; +import { Actor, Agent, getDefaultAgent } from '@dfinity/agent'; +import SubFolders from "./SubFolders"; +import EditItem from "./EditItem"; +import EditFolder from "./EditFolder"; +import { getIsLocal } from "../util/client"; +import { serializeItemRef } from '../data/Data' +import { Principal } from "@dfinity/principal"; +import { AuthContext, AuthProvider, useAuth } from './auth/use-auth-client' +import { idlFactory as mainIdlFactory } from "../../../declarations/main"; +import { ZonBackend } from "../../../declarations/main/main.did"; +import { Helmet } from 'react-helmet'; +import Person from "./personhood/Person"; +import { AllItems } from "./AllItems"; + +export const BusyContext = createContext(undefined); + +export default function App() { + const identityCanister = process.env.CANISTER_ID_INTERNET_IDENTITY; + const identityProvider = getIsLocal() ? 
`http://${identityCanister}.localhost:8000` : `https://identity.ic0.app`; + const [busy, setBusy] = useState(false); + return ( + <> + + Zon Social Media - the world as items in folders + + + +

+ It is a preliminary alpha-test version. All data is likely to be deleted before the release. +

+

Zon Social Network

+ { + console.log('Login Successful!'); + }, + onError: (error) => { + console.error('Login Failed: ', error); + }, + }}}> + + + {busy ?

Processing...

: + + {({defaultAgent}) => } + + } +
+
+
+
+ + ); +} + +function MyRouted(props: {defaultAgent: Agent | undefined}) { + const navigate = useNavigate(); + const [root, setRoot] = useState(""); + async function fetchRootItem() { + const MainCanister: ZonBackend = Actor.createActor(mainIdlFactory, {canisterId: process.env.CANISTER_ID_MAIN!, agent: props.defaultAgent}) + const data0 = await MainCanister.getRootItem(); + const [data] = data0; // TODO: We assume that it's initialized. + let [part, id] = data! as [Principal, bigint]; + let item = { canister: part, id: Number(id) }; + setRoot(serializeItemRef(item)); + } + fetchRootItem().then(() => {}); + function RootRedirector(props: {root: string}) { + useEffect(() => { + if (root !== "") { + navigate("/item/"+root); + } + }, [root]); + return ( +

Loading...

+ ); + } + const contextValue = useAuth(); + return ( + + {({isAuthenticated, principal, authClient, defaultAgent, options, login, logout}) => { + const signin = () => { + login!(); // TODO: `!` + }; + const signout = async () => { + await logout!(); // TODO: `!` + }; + return <> +

+ Logged in as: {isAuthenticated ? {principal?.toString()} : "(none)"}{" "} + {isAuthenticated ? : } +

+ + + } + /> + } + /> + } + /> + } + /> + } + /> + } + /> + } + /> + } + /> + { + function Edit(props) { + const routeParams = useParams(); + return ; + } + return ; + })() + } + /> + } + /> + { + function Edit(props) { + const routeParams = useParams(); + return ; + } + return ; + })() + } + /> + { + function Edit(props) { + const routeParams = useParams(); + return ; + } + return ; + })() + } + /> + } + /> + + + }} +
+ ); +} \ No newline at end of file diff --git a/e2e/assets/wrong_ids/src/frontend/src/component/EditFolder.tsx b/e2e/assets/wrong_ids/src/frontend/src/component/EditFolder.tsx new file mode 100644 index 0000000000..2b9bc4b8f1 --- /dev/null +++ b/e2e/assets/wrong_ids/src/frontend/src/component/EditFolder.tsx @@ -0,0 +1,150 @@ +import * as React from "react"; +import { useEffect, useState } from "react"; +import { Button } from "react-bootstrap"; +import { useNavigate, useParams } from "react-router-dom"; +import { Tab, TabList, TabPanel, Tabs } from "react-tabs"; +import { Helmet } from 'react-helmet'; +import { idlFactory as mainIdlFactory } from "../../../declarations/main"; +import { ItemDataWithoutOwner, ItemTransferWithoutOwner, ZonBackend } from "../../../declarations/main/main.did"; +import { idlFactory as canDBPartitionIdlFactory } from "../../../declarations/CanDBPartition"; +import { CanDBPartition } from "../../../declarations/CanDBPartition/CanDBPartition.did"; +import EditFoldersList from "./EditFoldersList"; +import { addToFolder, addToMultipleFolders } from "../util/folder"; +import { parseItemRef, serializeItemRef } from "../data/Data"; +import { AuthContext } from "./auth/use-auth-client"; +import { BusyContext } from "./App"; +import { Actor, Agent } from "@dfinity/agent"; + +export default function EditFolder(props: {super?: boolean, folderId?: string, superFolderId?: string, defaultAgent: Agent | undefined}) { + const navigate = useNavigate(); + const [superFolder, setSuperFolder] = useState(); + const [foldersList, setFoldersList] = useState<[string, 'beginning' | 'end'][]>([]); + const [antiCommentsList, setAntiCommentsList] = useState<[string, 'beginning' | 'end'][]>([]); + useEffect(() => { + setSuperFolder(props.superFolderId); + }, [props.superFolderId]); + enum FolderKind { owned, communal }; + const [folderKind, setFolderKind] = useState(FolderKind.owned); + const [locale, setLocale] = useState('en'); // TODO: user's locale + const 
[title, setTitle] = useState(""); + const [shortDescription, setShortDescription] = useState(""); + useEffect(() => { + if (props.folderId !== undefined) { + const folderId = parseItemRef(props.folderId); + const actor: CanDBPartition = Actor.createActor(canDBPartitionIdlFactory, {canisterId: folderId.canister, agent: props.defaultAgent}); + actor.getItem(BigInt(folderId.id)) + .then((itemx) => { + const item = itemx[0] ? itemx[0][0]!.data : undefined; + const communal = itemx[0]?.communal; // TODO: Simplify. + setFolderKind(communal ? FolderKind.communal : FolderKind.owned); + setLocale(item!.locale); + setTitle(item!.title); + setShortDescription(item!.description); + }); + } + }, [props.folderId]); + function onSelectTab(index: number) { + switch (index) { + case 0: + setFolderKind(FolderKind.owned); + break; + case 1: + setFolderKind(FolderKind.communal); + break; + } + } + return ( + + {({setBusy}) => + + {({agent, isAuthenticated}) => { + async function submit() { + function itemData(): ItemDataWithoutOwner { + return { + locale, + title, + description: shortDescription, + details: {folder: null}, + price: 0.0, // TODO + }; + } + async function submitItem(item: ItemDataWithoutOwner) { + const backend: ZonBackend = Actor.createActor(mainIdlFactory, {canisterId: process.env.CANISTER_ID_MAIN!, agent}); + let part, n; + if (props.folderId !== undefined) { + const folder = parseItemRef(props.folderId); // TODO: not here + await backend.setItemData(folder.canister, BigInt(folder.id), item); + part = folder.canister; + n = BigInt(folder.id); + } else { + const transfer: ItemTransferWithoutOwner = {data: item, communal: folderKind == FolderKind.communal}; + [part, n] = await backend.createItemData(transfer); + } + const ref = serializeItemRef({canister: part, id: Number(n)}); // TODO: Reduce code + if (!(props.super === true)) { // noComments + await addToMultipleFolders(agent!, foldersList, {canister: part, id: Number(n)}, false); + await 
addToMultipleFolders(agent!, antiCommentsList, {canister: part, id: Number(n)}, true); + } else { + for (const folder of foldersList) { + // TODO: It may fail to parse. + await addToFolder(agent!, {canister: part, id: Number(n)}, parseItemRef(folder[0]), false, folder[1]); + } + } + navigate("/item/"+ref); + } + setBusy(true); + await submitItem(itemData()); + setBusy(false); + } + async function remove() { + if (!window.confirm("Really delete?")) { + return; + } + const backend: ZonBackend = Actor.createActor(mainIdlFactory, {canisterId: process.env.CANISTER_ID_MAIN!, agent}); + const folder = parseItemRef(props.folderId!); // TODO: not here + await backend.removeItem(folder.canister, BigInt(folder.id)); + navigate("/"); + } + return <> + + Zon Social Media - create a new folder + +

{props.folderId !== undefined ? `Edit folder` : + props.super === true ? `Create superfolder` : `Create subfolder`}

+ + + Owned + Communal + + +

Owned folders have an owner (you). Only the owner can add, delete, and reoder items in an owned folder,{" "} + or rename the folder.

+
+ +

Communal folders have no owner. Anybody can add an item to a communal folder.{" "} + Nobody can delete an item from a communal folder or rename the folder. Ordering is determined by voting.

+
+
+

Language: setLocale(e.target.value)}/>

+

Title: setTitle(e.target.value)}/>

+

Short (meta) description: