diff --git a/mcrouter/ProxyRequestLogger-inl.h b/mcrouter/ProxyRequestLogger-inl.h
index 51a708da4..22e44a1a9 100644
--- a/mcrouter/ProxyRequestLogger-inl.h
+++ b/mcrouter/ProxyRequestLogger-inl.h
@@ -34,6 +34,10 @@ void ProxyRequestLogger<RouterInfo>::log(
   const auto durationUs = loggerContext.endTimeUs - loggerContext.startTimeUs;
   proxy_.stats().durationUs().insertSample(durationUs);
   logDurationByRequestType<Request>(durationUs);
+
+  proxy_.stats().processingTimeUs().insertSample(
+      loggerContext.startTimeUs - loggerCreatedTimeUs_ -
+      loggerContext.networkTransportTimeUs);
 }
 
 template <class RouterInfo>
diff --git a/mcrouter/ProxyRequestLogger.h b/mcrouter/ProxyRequestLogger.h
index fd74574f6..c819fe6a9 100644
--- a/mcrouter/ProxyRequestLogger.h
+++ b/mcrouter/ProxyRequestLogger.h
@@ -8,6 +8,7 @@
 #pragma once
 
 #include "mcrouter/McrouterFiberContext.h"
+#include "mcrouter/config.h"
 #include "mcrouter/lib/carbon/Result.h"
 #include "mcrouter/lib/carbon/RoutingGroups.h"
 
@@ -22,7 +23,8 @@ struct RequestLoggerContext;
 template <class RouterInfo>
 class ProxyRequestLogger {
  public:
-  explicit ProxyRequestLogger(Proxy<RouterInfo>& proxy) : proxy_(proxy) {}
+  explicit ProxyRequestLogger(Proxy<RouterInfo>& proxy)
+      : proxy_(proxy), loggerCreatedTimeUs_(nowUs()) {}
 
   template <class Request>
   void log(const RequestLoggerContext& loggerContext);
@@ -45,6 +47,8 @@ class ProxyRequestLogger {
   void logDurationByRequestType(
       uint64_t durationUs,
       carbon::OtherThanT<Request, carbon::GetLike<>, carbon::UpdateLike<>> = 0);
+
+  int64_t loggerCreatedTimeUs_ = 0;
 };
 } // namespace mcrouter
 } // namespace memcache
diff --git a/mcrouter/ProxyStats.h b/mcrouter/ProxyStats.h
index c867a2695..4e35b9a36 100644
--- a/mcrouter/ProxyStats.h
+++ b/mcrouter/ProxyStats.h
@@ -68,6 +68,10 @@ class ProxyStats {
     return axonProxyDurationUs_;
   }
 
+  ExponentialSmoothData<64>& processingTimeUs() {
+    return processingTimeUs_;
+  }
+
   size_t numBinsUsed() const {
     return numBinsUsed_;
   }
@@ -208,6 +212,9 @@ class ProxyStats {
   // Time spent for axonlog writing
   ExponentialSmoothData<64> axonProxyDurationUs_;
 
+  // Time spent in mcrouter, excluding time spent over network.
+  ExponentialSmoothData<64> processingTimeUs_;
+
   // we are wasting some memory here to get faster mapping from stat name to
   // statsBin_[] and statsNumWithinWindow_[] entry. i.e., the statsBin_[]
   // and statsNumWithinWindow_[] entry for non-rate stat are not in use.
diff --git a/mcrouter/stat_list.h b/mcrouter/stat_list.h
index 64fc23c96..a94158a4a 100644
--- a/mcrouter/stat_list.h
+++ b/mcrouter/stat_list.h
@@ -325,6 +325,8 @@ STAT(client_queue_notify_period, stat_double, 0, .dbl = 0.0)
 STAT(asynclog_duration_us, stat_double, 0, .dbl = 0.0)
 // avg time spent appending to axon proxy
 STAT(axon_proxy_duration_us, stat_double, 0, .dbl = 0.0)
+// Time spent in mcrouter, excluding time spent over network.
+STAT(processing_time_us, stat_double, 0, .dbl = 0.0)
 #undef GROUP
 #define GROUP ods_stats | basic_stats | max_stats
 STUI(destination_max_pending_reqs, 0, 1)
diff --git a/mcrouter/stats.cpp b/mcrouter/stats.cpp
index 15e670961..4fb5fe357 100644
--- a/mcrouter/stats.cpp
+++ b/mcrouter/stats.cpp
@@ -583,6 +583,8 @@ void prepare_stats(CarbonRouterInstanceBase& router, stat_t* stats) {
         std::min(
             i ? stat_get_uint64(stats, proxy_queue_full_stat) : 0,
             static_cast<uint64_t>(pr->messageQueueFull() ? 1 : 0)));
+    stat_incr(
+        stats, processing_time_us_stat, pr->stats().processingTimeUs().value());
   }
 
   if (router.opts().num_proxies > 0) {
@@ -610,6 +612,8 @@
     stat_div_internal(
         stats, asynclog_duration_us_stat, router.opts().num_proxies);
     stat_div_internal(
         stats, axon_proxy_duration_us_stat, router.opts().num_proxies);
+    stat_div_internal(
+        stats, processing_time_us_stat, router.opts().num_proxies);
   }
 
   for (int i = 0; i < num_stats; i++) {
diff --git a/mcrouter/test/test_mcrouter_processing_time.py b/mcrouter/test/test_mcrouter_processing_time.py
index 8cbf664ae..e93383f87 100644
--- a/mcrouter/test/test_mcrouter_processing_time.py
+++ b/mcrouter/test/test_mcrouter_processing_time.py
@@ -37,11 +37,12 @@ def tearDown(self) -> None:
     def test_mcrouter_processing_time(self) -> None:
         self.assertEqual(self.mcrouter.leaseGet("key1"), {"value": "", "token": 0})
         time.sleep(0.5)
+        stat = self.mcrouter.stats()
         self.mcrouter.terminate()
         scuba_samples = open(self.debug_file, "r").readlines()
         # One error sampler plus three main sampler.
         self.assertEqual(len(scuba_samples), 4)
-
+        self.assertGreaterEqual(stat["processing_time_us"], 200000)
         for sample in scuba_samples:
             sample_parsed = json.loads(sample)
             self.assertEqual(
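
Note (illustrative sketch, not part of the change): the new processing_time_us sample is a plain difference of three per-request microsecond timestamps, and prepare_stats() then averages the per-proxy smoothed values. The helper names below are hypothetical; only the arithmetic mirrors the diff above.

    #include <cstdint>
    #include <vector>

    // Mirrors the insertSample() call added in ProxyRequestLogger-inl.h:
    // the span between logger creation and the request's start timestamp,
    // minus the portion spent on network transport.
    int64_t processingTimeSampleUs(
        int64_t requestStartTimeUs,
        int64_t loggerCreatedTimeUs,
        int64_t networkTransportTimeUs) {
      return requestStartTimeUs - loggerCreatedTimeUs - networkTransportTimeUs;
    }

    // Mirrors the aggregation in prepare_stats(): per-proxy smoothed values
    // are summed via stat_incr() and divided by the number of proxies.
    double reportedProcessingTimeUs(const std::vector<double>& perProxyUs) {
      double sum = 0.0;
      for (double v : perProxyUs) {
        sum += v;
      }
      return perProxyUs.empty() ? 0.0 : sum / perProxyUs.size();
    }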