Instrument on:
- async
- threaded
- par_
- send on channels

Missing:
- create fns from closures that use the above and instrument them for smaller scopes (see the sketch below)

Signed-off-by: simonsan <14062932+simonsan@users.noreply.github.com>
simonsan committed Oct 29, 2024
1 parent 62aa898 commit e95b61b
Showing 14 changed files with 40 additions and 0 deletions.
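For context, the pattern applied throughout this commit is the tracing::instrument attribute with a skip(...) list, so that each call gets its own span while the receiver and large or non-Debug arguments are kept out of the recorded fields. The sketch below is illustrative only: Worker, send_pack, and spawn_worker are hypothetical names, a std::sync::mpsc channel stands in for the crossbeam channels used in the codebase, and it assumes the tracing crate with its default "attributes" feature. The second function shows one way the "Missing" item from the commit message could be covered, by creating a span by hand for work that runs inside a closure.

    use std::sync::mpsc::{SendError, Sender};

    struct Worker {
        sender: Sender<(u32, Vec<u8>)>,
    }

    impl Worker {
        // `skip(self, data)` keeps the channel handle and the payload out of the
        // span's recorded fields; `id` is still captured as a field.
        #[tracing::instrument(skip(self, data))]
        fn send_pack(&self, id: u32, data: Vec<u8>) -> Result<(), SendError<(u32, Vec<u8>)>> {
            self.sender.send((id, data))
        }
    }

    // For the closure case, create the span up front and enter it inside the
    // spawned closure so the smaller scope is traced on its own.
    fn spawn_worker(queue_len: usize) -> std::thread::JoinHandle<()> {
        let span = tracing::info_span!("worker", queue_len);
        std::thread::spawn(move || {
            let _guard = span.enter();
            // ... worker body runs inside the "worker" span ...
        })
    }

For futures built from closures, the same manual approach exists via the tracing::Instrument extension trait (fut.instrument(span)).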
1 change: 1 addition & 0 deletions crates/backend/src/opendal.rs
@@ -235,6 +235,7 @@ impl ReadBackend for OpenDALBackend {

impl WriteBackend for OpenDALBackend {
/// Create a repository on the backend.
#[tracing::instrument(skip(self))]
fn create(&self) -> Result<()> {
trace!("creating repo at {:?}", self.location());

1 change: 1 addition & 0 deletions crates/backend/src/rclone.rs
@@ -127,6 +127,7 @@ impl RcloneBackend {
///
/// If the rclone command is not found.
// TODO: This should be an error, not a panic.
#[tracing::instrument(skip(url))]
pub fn new(url: impl AsRef<str>, options: HashMap<String, String>) -> Result<Self> {
let rclone_command = options.get("rclone-command");
let use_password = options
6 changes: 6 additions & 0 deletions crates/backend/src/rest.rs
@@ -245,6 +245,7 @@ impl ReadBackend for RestBackend {
/// A vector of tuples containing the id and size of the files.
///
/// [`RestErrorKind::JoiningUrlFailed`]: RestErrorKind::JoiningUrlFailed
#[tracing::instrument(skip(self))]
fn list_with_size(&self, tpe: FileType) -> Result<Vec<(Id, u32)>> {
// format which is delivered by the REST-service
#[derive(Deserialize)]
@@ -313,6 +314,7 @@ impl ReadBackend for RestBackend {
/// * [`RestErrorKind::BackoffError`] - If the backoff failed.
///
/// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError
#[tracing::instrument(skip(self))]
fn read_full(&self, tpe: FileType, id: &Id) -> Result<Bytes> {
trace!("reading tpe: {tpe:?}, id: {id}");
let url = self.url(tpe, id)?;
@@ -346,6 +348,7 @@ impl ReadBackend for RestBackend {
/// * [`RestErrorKind::BackoffError`] - If the backoff failed.
///
/// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError
#[tracing::instrument(skip(self, _cacheable))]
fn read_partial(
&self,
tpe: FileType,
@@ -383,6 +386,7 @@ impl WriteBackend for RestBackend {
/// * [`RestErrorKind::BackoffError`] - If the backoff failed.
///
/// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError
#[tracing::instrument(skip(self))]
fn create(&self) -> Result<()> {
let url = self
.url
@@ -413,6 +417,7 @@ impl WriteBackend for RestBackend {
/// * [`RestErrorKind::BackoffError`] - If the backoff failed.
///
/// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError
#[tracing::instrument(skip(self, _cacheable))]
fn write_bytes(&self, tpe: FileType, id: &Id, _cacheable: bool, buf: Bytes) -> Result<()> {
trace!("writing tpe: {:?}, id: {}", &tpe, &id);
let req_builder = self.client.post(self.url(tpe, id)?).body(buf);
@@ -441,6 +446,7 @@ impl WriteBackend for RestBackend {
/// * [`RestErrorKind::BackoffError`] - If the backoff failed.
///
/// [`RestErrorKind::BackoffError`]: RestErrorKind::BackoffError
#[tracing::instrument(skip(self, _cacheable))]
fn remove(&self, tpe: FileType, id: &Id, _cacheable: bool) -> Result<()> {
trace!("removing tpe: {:?}, id: {}", &tpe, &id);
let url = self.url(tpe, id)?;
1 change: 1 addition & 0 deletions crates/core/src/archiver.rs
@@ -121,6 +121,7 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Archiver<'a, BE, I> {
/// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed
/// [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`]: crate::error::CryptBackendErrorKind::SerializingToJsonByteVectorFailed
/// [`SnapshotFileErrorKind::OutOfRange`]: crate::error::SnapshotFileErrorKind::OutOfRange
#[tracing::instrument(skip(self, src, p))]
pub fn archive<R>(
mut self,
src: &R,
3 changes: 3 additions & 0 deletions crates/core/src/backend/decrypt.rs
@@ -161,6 +161,7 @@ pub trait DecryptReadBackend: ReadBackend + Clone + 'static {
/// # Errors
///
/// If the files could not be read.
#[tracing::instrument(skip(self, p))]
fn stream_list<F: RepoFile>(&self, list: &[Id], p: &impl Progress) -> StreamResult<F::Id, F> {
p.set_length(list.len() as u64);
let (tx, rx) = unbounded();
@@ -279,6 +280,7 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static {
/// # Errors
///
/// * [`CryptBackendErrorKind::SerializingToJsonByteVectorFailed`] - If the file could not be serialized to json.
#[tracing::instrument(skip(self, list, p))]
fn save_list<'a, F: RepoFile, I: ExactSizeIterator<Item = &'a F> + Send>(
&self,
list: I,
@@ -306,6 +308,7 @@ pub trait DecryptWriteBackend: WriteBackend + Clone + 'static {
/// # Panics
///
/// If the files could not be deleted.
#[tracing::instrument(skip(self, list, p))]
fn delete_list<'a, ID: RepoId, I: ExactSizeIterator<Item = &'a ID> + Send>(
&self,
cacheable: bool,
5 changes: 5 additions & 0 deletions crates/core/src/blob/packer.rs
@@ -194,6 +194,7 @@ impl<BE: DecryptWriteBackend> Packer<BE> {
/// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed
/// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed
#[allow(clippy::unnecessary_wraps)]
#[tracing::instrument(skip(be, indexer))]
pub fn new(
be: BE,
blob_type: BlobType,
@@ -295,6 +296,7 @@ impl<BE: DecryptWriteBackend> Packer<BE> {
/// * [`PackerErrorKind::SendingCrossbeamMessageFailed`] - If sending the message to the raw packer fails.
///
/// [`PackerErrorKind::SendingCrossbeamMessageFailed`]: crate::error::PackerErrorKind::SendingCrossbeamMessageFailed
#[tracing::instrument(skip(self))]
fn add_with_sizelimit(
&self,
data: Bytes,
@@ -602,6 +604,7 @@ impl<BE: DecryptWriteBackend> RawPacker<BE> {
///
/// [`PackerErrorKind::IntConversionFailed`]: crate::error::PackerErrorKind::IntConversionFailed
/// [`PackFileErrorKind::WritingBinaryRepresentationFailed`]: crate::error::PackFileErrorKind::WritingBinaryRepresentationFailed
#[tracing::instrument(skip(self))]
fn save(&mut self) -> RusticResult<()> {
if self.size == 0 {
return Ok(());
@@ -677,6 +680,7 @@ impl Actor {
/// * `fwh` - The file writer handle.
/// * `queue_len` - The length of the queue.
/// * `par` - The number of parallel threads.
#[tracing::instrument(skip(fwh, _par))]
fn new<BE: DecryptWriteBackend>(
fwh: FileWriterHandle<BE>,
queue_len: usize,
@@ -718,6 +722,7 @@ impl Actor {
/// # Errors
///
/// If sending the message to the actor fails.
#[tracing::instrument(skip(self))]
fn send(&self, load: (Bytes, IndexPack)) -> RusticResult<()> {
self.sender
.send(load)
2 changes: 2 additions & 0 deletions crates/core/src/blob/tree.rs
@@ -671,6 +671,7 @@ impl<P: Progress> TreeStreamerOnce<P> {
/// * [`TreeErrorKind::SendingCrossbeamMessageFailed`] - If sending the message fails.
///
/// [`TreeErrorKind::SendingCrossbeamMessageFailed`]: crate::error::TreeErrorKind::SendingCrossbeamMessageFailed
#[tracing::instrument(skip(be, index, p))]
pub fn new<BE: DecryptReadBackend, I: ReadGlobalIndex>(
be: &BE,
index: &I,
@@ -733,6 +734,7 @@ impl<P: Progress> TreeStreamerOnce<P> {
/// * [`TreeErrorKind::SendingCrossbeamMessageFailed`] - If sending the message fails.
///
/// [`TreeErrorKind::SendingCrossbeamMessageFailed`]: crate::error::TreeErrorKind::SendingCrossbeamMessageFailed
#[tracing::instrument(skip(self))]
fn add_pending(&mut self, path: PathBuf, id: TreeId, count: usize) -> RusticResult<bool> {
if self.visited.insert(id) {
self.queue_in
2 changes: 2 additions & 0 deletions crates/core/src/commands/check.rs
@@ -202,6 +202,7 @@ pub struct CheckOptions {
/// # Panics
///
// TODO: Add panics
#[tracing::instrument(skip(repo))]
pub(crate) fn check_repository<P: ProgressBars, S: Open>(
repo: &Repository<P, S>,
opts: CheckOptions,
@@ -350,6 +351,7 @@ fn check_hot_files(
/// # Errors
///
/// If a file is missing or has a different size
#[tracing::instrument(skip(be, p, _concurrency))]
fn check_cache_files(
_concurrency: usize,
cache: &Cache,
1 change: 1 addition & 0 deletions crates/core/src/commands/copy.rs
@@ -41,6 +41,7 @@ pub struct CopySnapshot {
/// # Errors
///
// TODO: Document errors
#[tracing::instrument(skip(repo, repo_dest, snapshots))]
pub(crate) fn copy<'a, Q, R: IndexedFull, P: ProgressBars, S: IndexedIds>(
repo: &Repository<Q, R>,
repo_dest: &Repository<P, S>,
1 change: 1 addition & 0 deletions crates/core/src/commands/prune.rs
@@ -1182,6 +1182,7 @@ impl PrunePlan {
/// TODO! In weird circumstances, should be fixed.
#[allow(clippy::significant_drop_tightening)]
#[allow(clippy::too_many_lines)]
#[tracing::instrument(skip(repo))]
pub(crate) fn prune_repository<P: ProgressBars, S: Open>(
repo: &Repository<P, S>,
opts: &PruneOptions,
1 change: 1 addition & 0 deletions crates/core/src/commands/restore.rs
@@ -430,6 +430,7 @@ pub(crate) fn set_metadata(
/// [`CommandErrorKind::ErrorSettingLength`]: crate::error::CommandErrorKind::ErrorSettingLength
/// [`CommandErrorKind::FromRayonError`]: crate::error::CommandErrorKind::FromRayonError
#[allow(clippy::too_many_lines)]
#[tracing::instrument(skip(repo))]
fn restore_contents<P: ProgressBars, S: Open>(
repo: &Repository<P, S>,
dest: &LocalDestination,
2 changes: 2 additions & 0 deletions crates/core/src/index/binarysorted.rs
@@ -120,6 +120,7 @@ impl IndexCollector {
}

// Turns Collector into an index by sorting the entries by ID.
#[tracing::instrument(skip(self))]
#[must_use]
pub fn into_index(self) -> Index {
Index(self.0.map(|_, mut tc| {
@@ -223,6 +224,7 @@ impl IntoIterator for Index {
type IntoIter = PackIndexes;

// Turns Collector into an iterator yielding PackIndex by sorting the entries by pack.
#[tracing::instrument(skip(self))]
fn into_iter(mut self) -> Self::IntoIter {
for tc in self.0.values_mut() {
if let EntriesVariants::FullEntries(entries) = &mut tc.entries {
1 change: 1 addition & 0 deletions crates/core/src/repository/warm_up.rs
@@ -119,6 +119,7 @@ fn warm_up_command<P: ProgressBars>(
/// * [`RepositoryErrorKind::FromThreadPoolbilderError`] - If the thread pool could not be created.
///
/// [`RepositoryErrorKind::FromThreadPoolbilderError`]: crate::error::RepositoryErrorKind::FromThreadPoolbilderError
#[tracing::instrument(skip(repo, packs))]
fn warm_up_repo<P: ProgressBars, S>(
repo: &Repository<P, S>,
packs: impl ExactSizeIterator<Item = PackId>,
13 changes: 13 additions & 0 deletions crates/core/src/vfs/webdavfs.rs
@@ -132,10 +132,12 @@ impl<P, S: IndexedFull> Clone for WebDavFS<P, S> {
impl<P: Debug + Send + Sync + 'static, S: IndexedFull + Debug + Send + Sync + 'static> DavFileSystem
for WebDavFS<P, S>
{
#[tracing::instrument(skip(self))]
fn metadata<'a>(&'a self, davpath: &'a DavPath) -> FsFuture<'_, Box<dyn DavMetaData>> {

Check warning on line 136 in crates/core/src/vfs/webdavfs.rs (GitHub Actions cross-check, all targets): elided lifetime has a name
self.symlink_metadata(davpath)
}

#[tracing::instrument(skip(self))]
fn symlink_metadata<'a>(&'a self, davpath: &'a DavPath) -> FsFuture<'_, Box<dyn DavMetaData>> {

Check warning on line 141 in crates/core/src/vfs/webdavfs.rs (GitHub Actions cross-check, all targets): elided lifetime has a name
async move {
let node = self.node_from_path(davpath)?;
@@ -145,6 +147,7 @@ impl<P: Debug + Send + Sync + 'static, S: IndexedFull + Debug + Send + Sync + 's
.boxed()
}

#[tracing::instrument(skip(self, _meta))]
fn read_dir<'a>(
&'a self,
davpath: &'a DavPath,
@@ -162,6 +165,7 @@ impl<P: Debug + Send + Sync + 'static, S: IndexedFull + Debug + Send + Sync + 's
.boxed()
}

#[tracing::instrument(skip(self))]
fn open<'a>(
&'a self,
path: &'a DavPath,
@@ -204,6 +208,7 @@ impl<P: Debug + Send + Sync + 'static, S: IndexedFull + Debug + Send + Sync + 's
struct DavFsDirEntry(Node);

impl DavDirEntry for DavFsDirEntry {
#[tracing::instrument(skip(self))]
fn metadata(&self) -> FsFuture<'_, Box<dyn DavMetaData>> {
async move {
let meta: Box<dyn DavMetaData> = Box::new(DavFsMetaData(self.0.clone()));
@@ -213,11 +218,13 @@ impl DavDirEntry for DavFsDirEntry {
}

#[cfg(not(windows))]
#[tracing::instrument(skip(self))]
fn name(&self) -> Vec<u8> {
self.0.name().as_bytes().to_vec()
}

#[cfg(windows)]
#[tracing::instrument(skip(self))]
fn name(&self) -> Vec<u8> {
self.0
.name()
@@ -252,6 +259,7 @@ impl<P, S> Debug for DavFsFile<P, S> {
}

impl<P: Debug + Send + Sync, S: IndexedFull + Debug + Send + Sync> DavFile for DavFsFile<P, S> {
#[tracing::instrument(skip(self))]
fn metadata(&mut self) -> FsFuture<'_, Box<dyn DavMetaData>> {
async move {
let meta: Box<dyn DavMetaData> = Box::new(DavFsMetaData(self.node.clone()));
@@ -260,14 +268,17 @@ impl<P: Debug + Send + Sync, S: IndexedFull + Debug + Send + Sync> DavFile for D
.boxed()
}

#[tracing::instrument(skip(self, _buf))]
fn write_bytes(&mut self, _buf: Bytes) -> FsFuture<'_, ()> {
async move { Err(FsError::Forbidden) }.boxed()
}

#[tracing::instrument(skip(self, _buf))]
fn write_buf(&mut self, _buf: Box<dyn Buf + Send>) -> FsFuture<'_, ()> {
async move { Err(FsError::Forbidden) }.boxed()
}

#[tracing::instrument(skip(self))]
fn read_bytes(&mut self, count: usize) -> FsFuture<'_, Bytes> {
async move {
let data = self
@@ -281,6 +292,7 @@ impl<P: Debug + Send + Sync, S: IndexedFull + Debug + Send + Sync> DavFile for D
.boxed()
}

#[tracing::instrument(skip(self))]
fn seek(&mut self, pos: SeekFrom) -> FsFuture<'_, u64> {
async move {
match pos {
@@ -306,6 +318,7 @@ impl<P: Debug + Send + Sync, S: IndexedFull + Debug + Send + Sync> DavFile for D
.boxed()
}

#[tracing::instrument(skip(self))]
fn flush(&mut self) -> FsFuture<'_, ()> {
async move { Ok(()) }.boxed()
}
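The repeated cross-check warnings collapsed above (lines 136 and 141 of webdavfs.rs) come from signatures that declare a lifetime 'a but write FsFuture<'_, ...> in the return type: the elided '_ resolves to the named 'a, which recent rustc versions flag as "elided lifetime has a name". Below is a minimal, self-contained sketch of the same lint and of the kind of signature change that would silence it; it is an illustration, not part of this commit.

    struct Wrapper<'a, T: ?Sized>(&'a T);

    // Warns: the elided `'_` in the return type resolves to the named `'a`.
    fn elided<'a>(x: &'a str) -> Wrapper<'_, str> {
        Wrapper(x)
    }

    // Quiet: name the lifetime explicitly, the same change one could apply to
    // `FsFuture<'_, _>` in the signatures above.
    fn named<'a>(x: &'a str) -> Wrapper<'a, str> {
        Wrapper(x)
    }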