Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 0 additions & 4 deletions pyrefly/lib/alt/function.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1203,10 +1203,6 @@ impl<'a, Ans: LookupAnswer> AnswersSolver<'a, Ans> {
errors: &ErrorCollector,
) {
let name = match decorator {
SpecialDecorator::StaticMethod(name) => name.as_str(),
SpecialDecorator::ClassMethod(name) => name.as_str(),
SpecialDecorator::Property(name) => name.as_str(),
SpecialDecorator::CachedProperty(name) => name.as_str(),
SpecialDecorator::EnumMember => "member",
SpecialDecorator::Override => "override",
SpecialDecorator::Final => "final",
Expand Down
55 changes: 50 additions & 5 deletions pyrefly/lib/test/decorators.rs
Original file line number Diff line number Diff line change
Expand Up @@ -246,23 +246,68 @@ testcase!(
test_invalid_top_level_function_decorators,
r#"
from typing import *
from abc import abstractstaticmethod, abstractmethod # E: `abstractstaticmethod` is deprecated
from abc import abstractmethod
from enum import member, nonmember

@member # E: can only be used on methods
@nonmember # E: can only be used on methods
@abstractmethod # E: can only be used on methods
@staticmethod # E: can only be used on methods
@classmethod # E: can only be used on methods
@abstractstaticmethod # E: can only be used on methods
@property # E: can only be used on methods
@final # E: can only be used on methods
@override # E: can only be used on methods
def f(x: int) -> int:
return x
"#,
);

// Regression test: descriptor decorators (`@property`, `@staticmethod`,
// `@classmethod`) applied to top-level functions no longer error, and the
// resulting descriptors behave correctly when later assigned as class attributes.
// Per review feedback, the staticmethod case is asserted via BOTH class access and
// instance access — instance access is where `@staticmethod` semantics actually
// differ from a plain function attribute (which would bind `self`).
testcase!(
    test_top_level_descriptor_decorators,
    r#"
from typing import assert_type

def make_property():
    @property
    def x(self: "Point") -> int:
        return self._x

    return x


class Point:
    def __init__(self, x: int):
        self._x = x

    x = make_property()


assert_type(Point(1).x, int)

@staticmethod
def utility(x: int) -> int:
    return x * 2


class Helper:
    compute = utility


assert_type(Helper.compute(5), int)
assert_type(Helper().compute(5), int)

@classmethod
def from_value(cls, val: int) -> "Container":
    obj = cls()
    obj.value = val
    return obj


class Container:
    value: int = 0
    from_value = from_value


assert_type(Container.from_value(42), Container)
"#,
);

testcase!(
test_callable_class_as_decorator,
r#"
Expand Down
2 changes: 1 addition & 1 deletion pyrefly/lib/test/lsp/lsp_interaction/pytorch_benchmark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ fn test_pytorch_error_propagation_latency() {
};
// Use all available cores for realistic benchmarking
let mut interaction =
LspInteraction::new_with_args(args, NoTelemetry, Some(ThreadCount::AllThreads));
LspInteraction::new_with_args(args, NoTelemetry, Some(ThreadCount::AllThreads), None);
interaction.set_root(pytorch_root.clone());

interaction
Expand Down
Loading