Skip to content

[flake8-bugbear] Add async-lru and aiocache decorators to the B019 rule checker #16450

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
199 changes: 195 additions & 4 deletions crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B019.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""
Should emit:
B019 - on the decorated instance methods in the "Remaining methods should emit B019" section below (the hard-coded line-number list is stale after this change; regenerate it)
"""
import aiocache
from aiocache import cached, cached_stampede, multi_cached
import async_lru
from async_lru import alru_cache
import functools
from functools import cache, cached_property, lru_cache

Expand All @@ -14,6 +14,21 @@ def some_other_cache():
def compute_func(self, y):
...

@async_lru.alru_cache
async def async_lru_compute_func(self, y):
...

# Each stub gets a unique name: previously all three were named
# `aiocache_compute_func`, so the earlier definitions were shadowed and dead.
@aiocache.cached
async def aiocache_cached_compute_func(self, y):
    ...

@aiocache.cached_stampede
async def aiocache_cached_stampede_compute_func(self, y):
    ...

@aiocache.multi_cached
async def aiocache_multi_cached_compute_func(self, y):
    ...

class Foo:
def __init__(self, x):
Expand All @@ -25,6 +40,12 @@ def compute_method(self, y):
@some_other_cache
def user_cached_instance_method(self, y):
...

@some_other_cache
async def async_user_cached_instance_method(self, y):
...

# functools

@classmethod
@functools.cache
Expand Down Expand Up @@ -74,7 +95,93 @@ def some_cached_property(self):
def some_other_cached_property(self):
...

# async_lru

@classmethod
@async_lru.alru_cache
def alru_cached_classmethod(cls, y):
...

@classmethod
@alru_cache
def other_alru_cached_classmethod(cls, y):
...

@staticmethod
@async_lru.alru_cache
def alru_cached_staticmethod(y):
...

@staticmethod
@alru_cache
def other_alru_cached_staticmethod(y):
...

# aiocache

@classmethod
@aiocache.cached
def aiocache_cached_classmethod(cls, y):
...

@classmethod
@cached
def other_aiocache_cached_classmethod(cls, y):
...

@staticmethod
@aiocache.cached
def aiocache_cached_staticmethod(y):
...

@staticmethod
@cached
def other_aiocache_cached_staticmethod(y):
...

@classmethod
@aiocache.cached_stampede
def aiocache_cached_stampede_classmethod(cls, y):
...

@classmethod
@cached_stampede
def other_aiocache_cached_stampede_classmethod(cls, y):
...

@staticmethod
@aiocache.cached_stampede
def aiocache_cached_stampede_staticmethod(y):
...

@staticmethod
@cached_stampede
def other_aiocache_cached_stampede_staticmethod(y):
...

@classmethod
@aiocache.multi_cached
def aiocache_multi_cached_classmethod(cls, y):
...

@classmethod
@multi_cached
def other_aiocache_multi_cached_classmethod(cls, y):
...

@staticmethod
@aiocache.multi_cached
def aiocache_multi_cached_staticmethod(y):
...

@staticmethod
@multi_cached
def other_aiocache_multi_cached_staticmethod(y):
...

# Remaining methods should emit B019

# functools
@functools.cache
def cached_instance_method(self, y):
...
Expand Down Expand Up @@ -106,6 +213,74 @@ def called_lru_cached_instance_method(self, y):
@lru_cache()
def another_called_lru_cached_instance_method(self, y):
...

# async_lru

@async_lru.alru_cache
async def alru_cached_instance_method(self, y):
...

@alru_cache
async def another_alru_cached_instance_method(self, y):
...

@async_lru.alru_cache()
async def called_alru_cached_instance_method(self, y):
...

@alru_cache()
async def another_called_alru_cached_instance_method(self, y):
...

# aiocache

@aiocache.cached
async def aiocache_cached_instance_method(self, y):
...

@cached
async def another_aiocache_cached_instance_method(self, y):
...

@aiocache.cached()
async def called_aiocache_cached_instance_method(self, y):
...

@cached()
async def another_called_aiocache_cached_instance_method(self, y):
...

@aiocache.cached_stampede
async def aiocache_cached_stampede_instance_method(self, y):
...

@cached_stampede
async def another_aiocache_cached_stampede_instance_method(self, y):
...

@aiocache.cached_stampede()
async def called_aiocache_cached_stampede_instance_method(self, y):
...

# Renamed to match the `another_called_aiocache_<decorator>_instance_method`
# convention used by the sibling fixtures for `cached` and `multi_cached`.
@cached_stampede()
async def another_called_aiocache_cached_stampede_instance_method(self, y):
    ...

@aiocache.multi_cached
async def aiocache_multi_cached_instance_method(self, y):
...

@multi_cached
async def another_aiocache_multi_cached_instance_method(self, y):
...

@aiocache.multi_cached()
async def called_aiocache_multi_cached_instance_method(self, y):
...

@multi_cached()
async def another_called_aiocache_multi_cached_instance_method(self, y):
...


import enum
Expand All @@ -124,3 +299,19 @@ class Metaclass(type):
@functools.lru_cache
def lru_cached_instance_method_on_metaclass(cls, x: int):
...

@async_lru.alru_cache
async def alru_cached_instance_method_on_metaclass(cls, x: int):
...

@aiocache.cached
async def aiocache_cached_instance_method_on_metaclass(cls, x: int):
...

@aiocache.cached_stampede
async def aiocache_cached_stampede_instance_method_on_metaclass(cls, x: int):
...

@aiocache.multi_cached
async def aiocache_multi_cached_instance_method_on_metaclass(cls, x: int):
...
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, ViolationMetadata};
use ruff_python_ast::helpers::map_callable;
use ruff_python_ast::name::QualifiedName;
use ruff_python_ast::{self as ast, Expr};
use ruff_python_semantic::analyze::{class, function_type};
use ruff_python_semantic::{ScopeKind, SemanticModel};
Expand All @@ -9,17 +10,17 @@ use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;

/// ## What it does
/// Checks for uses of the `functools.lru_cache` and `functools.cache`
/// decorators on methods.
/// Checks for uses of caching decorators (e.g., `functools.lru_cache`,
/// `functools.cache`) or async caching decorators (e.g., `async_lru.alru_cache`, `aiocache.cached`,
/// `aiocache.cached_stampede`, `aiocache.multi_cached`) on methods.
///
/// ## Why is this bad?
/// Using the `functools.lru_cache` and `functools.cache` decorators on methods
/// can lead to memory leaks, as the global cache will retain a reference to
/// the instance, preventing it from being garbage collected.
/// Using cache decorators on methods can lead to memory leaks, as the global
/// cache will retain a reference to the instance, preventing it from being
/// garbage collected.
///
/// Instead, refactor the method to depend only on its arguments and not on the
/// instance of the class, or use the `@lru_cache` decorator on a function
/// outside of the class.
/// instance of the class, or use the decorator on a function outside of the class.
///
/// This rule ignores instance methods on enumeration classes, as enum members
/// are singletons.
Expand Down Expand Up @@ -61,15 +62,64 @@ use crate::checkers::ast::Checker;
/// ## References
/// - [Python documentation: `functools.lru_cache`](https://docs.python.org/3/library/functools.html#functools.lru_cache)
/// - [Python documentation: `functools.cache`](https://docs.python.org/3/library/functools.html#functools.cache)
/// - [GitHub: `async-lru`](https://github.com/aio-libs/async-lru)
/// - [GitHub: `aiocache`](https://github.com/aio-libs/aiocache)
/// - [don't lru_cache methods!](https://www.youtube.com/watch?v=sVjtp6tGo0g)
#[derive(ViolationMetadata)]
pub(crate) struct CachedInstanceMethod;
pub(crate) struct CachedInstanceMethod {
decorator_name: LruDecorator,
}

impl CachedInstanceMethod {
pub(crate) fn new(decorator_name: LruDecorator) -> Self {
Self { decorator_name }
}
}

impl Violation for CachedInstanceMethod {
    #[derive_message_formats]
    fn message(&self) -> String {
        // Name the specific decorator so the diagnostic is actionable for the
        // non-`functools` caches as well.
        format!(
            "Use of `{}` on methods can lead to memory leaks",
            self.decorator_name
        )
    }
}

/// The set of caching decorators recognized by B019.
///
/// Despite the name, this covers more than LRU caches: it includes the
/// `functools` decorators as well as the async caching decorators from the
/// `async_lru` and `aiocache` packages.
#[derive(Copy, Clone, Debug)]
pub(crate) enum LruDecorator {
    // Renamed from `FuncToolsLruCache`: "functools" is one word, matching the
    // sibling `FunctoolsCache` variant and Rust UpperCamelCase conventions.
    FunctoolsLruCache,
    FunctoolsCache,
    AsyncLru,
    AiocacheCached,
    AiocacheCachedStampede,
    AiocacheMultiCached,
}

impl LruDecorator {
    /// Maps a resolved qualified name to the corresponding caching decorator,
    /// or `None` if the name is not one of the recognized decorators.
    fn from_qualified_name(qualified_name: &QualifiedName<'_>) -> Option<Self> {
        match qualified_name.segments() {
            ["functools", "lru_cache"] => Some(Self::FunctoolsLruCache),
            ["functools", "cache"] => Some(Self::FunctoolsCache),
            ["async_lru", "alru_cache"] => Some(Self::AsyncLru),
            ["aiocache", "cached"] => Some(Self::AiocacheCached),
            ["aiocache", "cached_stampede"] => Some(Self::AiocacheCachedStampede),
            ["aiocache", "multi_cached"] => Some(Self::AiocacheMultiCached),
            _ => None,
        }
    }
}

impl std::fmt::Display for LruDecorator {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // Render the fully qualified form exactly as a user would write it,
        // for interpolation into the diagnostic message.
        match self {
            Self::FunctoolsLruCache => f.write_str("functools.lru_cache"),
            Self::FunctoolsCache => f.write_str("functools.cache"),
            Self::AsyncLru => f.write_str("async_lru.alru_cache"),
            Self::AiocacheCached => f.write_str("aiocache.cached"),
            Self::AiocacheCachedStampede => f.write_str("aiocache.cached_stampede"),
            Self::AiocacheMultiCached => f.write_str("aiocache.multi_cached"),
        }
    }
}

Expand All @@ -96,25 +146,28 @@ pub(crate) fn cached_instance_method(checker: &Checker, function_def: &ast::Stmt
}

for decorator in &function_def.decorator_list {
if is_cache_func(map_callable(&decorator.expression), checker.semantic()) {
// If we found a cached instance method, validate (lazily) that the class is not an enum.
if let Some(decorator_name) =
get_cache_decorator_name(map_callable(&decorator.expression), checker.semantic())
{
// Ignore if class is an enum (enum members are singletons).
if class::is_enumeration(class_def, checker.semantic()) {
return;
}

checker.report_diagnostic(Diagnostic::new(CachedInstanceMethod, decorator.range()));
checker.report_diagnostic(Diagnostic::new(
CachedInstanceMethod::new(decorator_name),
decorator.range(),
));
}
}
}

/// Returns `true` if the given expression is a call to `functools.lru_cache` or `functools.cache`.
fn is_cache_func(expr: &Expr, semantic: &SemanticModel) -> bool {
semantic
.resolve_qualified_name(expr)
.is_some_and(|qualified_name| {
matches!(
qualified_name.segments(),
["functools", "lru_cache" | "cache"]
)
})
/// Returns `Some(<decorator_name>)` if the given expression is one of the known
/// cache decorators, otherwise `None`.
fn get_cache_decorator_name(expr: &Expr, semantic: &SemanticModel) -> Option<LruDecorator> {
if let Some(qualified_name) = semantic.resolve_qualified_name(expr) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We have to gate this new behavior behind preview mode because it's a considerable extension of the rule's intent (checker.settings().preview.is_enabled())

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't understand how to write insta tests in this case, could you explain it to me or give an example?

LruDecorator::from_qualified_name(&qualified_name)
} else {
None
}
}
Loading
Loading