Skip to content

Commit 7fc38c8

Browse files
Formats code in documentation with black
This change updates our documentation generation to format all code blocks with `black`. This helps ensure that lines do not overflow and create scrollable elements in the rendered docs. For lines that `black` doesn't touch (e.g. comments), we include our own assertions to ensure they don't exceed the length limits. The logic to extract code from a code block (i.e. stripping out markup) has also been consolidated. Finally, this change removes the `manual` test cadence and related tests, since they were redundant (the same things are tested during doc generation) and didn't add any value. The thought was that they would make debugging easier, but (1) they don't, and (2) it's rarely difficult to figure out what's wrong with the code blocks in documentation.
1 parent a2d3d11 commit 7fc38c8

File tree

12 files changed

+164
-160
lines changed

12 files changed

+164
-160
lines changed

tripy/docs/conf.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -286,10 +286,9 @@ def allow_no_example():
286286

287287
code_block_lines, local_var_lines, output_lines, _ = helper.process_code_block_for_outputs_and_locals(
288288
block,
289-
block.code(),
290289
format_contents=lambda title, contents, lang: f"\n\n.. code-block:: {lang}\n"
291290
+ indent((f":caption: {title}" if title else "") + f"\n\n{contents}", prefix=" " * helper.TAB_SIZE),
292-
err_msg=f"Failed while processing docstring for: {what}: {name} ({obj})",
291+
err_msg=f"Failed while processing docstring for: {what}: {name} ({obj}): ",
293292
strip_assertions=True,
294293
)
295294

tripy/docs/generate_rsts.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -208,9 +208,8 @@ def add_block(title, contents, lang):
208208
code_block_lines, local_var_lines, output_lines, code_locals = (
209209
helper.process_code_block_for_outputs_and_locals(
210210
block.raw_str(),
211-
str(block),
212211
format_contents=add_block,
213-
err_msg=f"Error while executing code block from {guide_path}.",
212+
err_msg=f"Error while executing code block {index} (line {block.line_number}) from {guide_path}. ",
214213
local_vars=code_locals,
215214
)
216215
)

tripy/docs/post0_developer_guides/how-to-add-new-ops.md

Lines changed: 42 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -47,9 +47,10 @@ from tripy.flat_ir.ops.base import BaseFlatIROp
4747
class ThetaOp(BaseFlatIROp):
4848
dim: int
4949

50-
# `to_mlir()` is the trickiest bit. As the name implies, the method is meant to lower the
51-
# `FlatIR` operator into MLIR. To figure out which MLIR operators to use, refer to
52-
# the 'MLIR Python API Guide' (linked below).
50+
# `to_mlir()` is the trickiest bit. As the name implies, the method is
51+
# meant to lower the `FlatIR` operator into MLIR. To figure out which
52+
# MLIR operators to use, refer to the 'MLIR Python API Guide'
53+
# (linked below).
5354
def to_mlir(self, operands):
5455
out_type = self.outputs[0].to_mlir()
5556
theta_dim = ir.IntegerAttr.get(type=ir.IntegerType.get_signless(64), value=self.dim)
@@ -116,29 +117,31 @@ from tripy.frontend.trace.ops.base import BaseTraceOp
116117
import tripy.frontend.trace.ops.utils as op_utils
117118

118119

119-
# Just like with `FlatIR` operators, all `Trace` operators are implemented as `dataclass`es.
120-
# As before, we want `repr=False` here.
120+
# Just like with `FlatIR` operators, all `Trace` operators are implemented
121+
# as `dataclass`es. As before, we want `repr=False` here.
121122
@dataclass(repr=False)
122123
class Theta(BaseTraceOp):
123-
# Notice that we do *not* need to define a constructor and can rely on the default
124-
# implementation provided by `dataclass`.
124+
# Notice that we do *not* need to define a constructor and can rely on
125+
# the default implementation provided by `dataclass`.
125126
dim: int
126127
dtype: datatype.dtype
127128

128129
# `infer_rank()` populates the rank of the output `TraceTensor`s.
129-
# Here we use one of the predefined policies to set the output rank to the same as the shape (i.e. the length)
130-
# of the shape operand.
130+
# Here we use one of the predefined policies to set the output rank
131+
# to the same as the shape (i.e. the length) of the shape operand.
131132
infer_rank = op_utils.InferRankPolicies.same_as_shape_of_shape_input()
132133

133134
# *Optional* `infer_dtypes()` populates the data types of the
134135
# output `TraceTensor`s. The default implementation copies the input
135-
# data types if they are all the same, so you may not need to implement this.
136+
# data types if they are all the same, so you may not need to implement
137+
# this.
136138
def infer_dtypes(self):
137139
self.outputs[0].dtype = self.dtype
138140

139141
# *Optional* `infer_devices()` populates the devices of the
140142
# output `TraceTensor`s. The default implementation copies the input
141-
# devices if they are all the same, so you may not need to implement this either.
143+
# devices if they are all the same, so you may not need to implement
144+
# this either.
142145
def infer_devices(self):
143146
self.outputs[0].device = device("gpu")
144147

@@ -177,30 +180,35 @@ from tripy import export
177180
import tripy.frontend.utils as frontend_utils
178181
from tripy.types import ShapeLike
179182

180-
# We can use the `export.public_api()` decorator to automatically export this function into the
181-
# top-level module. This means it will be accessible as `tripy.theta`.
183+
# We can use the `export.public_api()` decorator to automatically export this
184+
# function into the top-level module. This means it will be accessible as
185+
# `tripy.theta`.
182186
#
183-
# This decorator also controls how the API is exposed in the documentation - the `document_under`
184-
# option determines where in the documentation hierarchy this API will show up.
187+
# This decorator also controls how the API is exposed in the documentation -
188+
# the `document_under` option determines where in the documentation hierarchy
189+
# this API will show up.
185190
#
186-
# If we needed to provide any special autodoc options, we could use the `autodoc_options` parameter.
191+
# If we needed to provide any special autodoc options, we could use the
192+
# `autodoc_options` parameter.
187193
@export.public_api(document_under="tensor_operations")
188194

189-
# The `convert_to_tensors` decorator automatically converts compatible arguments,
190-
# like `TensorLike` or `ShapeLike`s, into tensors.
195+
# The `convert_to_tensors` decorator automatically converts compatible
196+
# arguments, like `TensorLike` or `ShapeLike`s, into tensors.
191197
@frontend_utils.convert_to_tensors()
192198
def theta(shape: ShapeLike, dim: int = 0, dtype: datatype.dtype = datatype.float32) -> "tripy.Tensor":
193-
# For any public facing interfaces, we have documentation requirements which you can read
194-
# about in the 'Docs README' (linked below). The docstring we've implemented here
195-
# adheres to all of these requirements. Non-compliant docstrings will, in most cases,
196-
# cause test failures; however, you should still manually ensure you're writing high-quality
197-
# docstrings.
199+
# For any public facing interfaces, we have documentation requirements which
200+
# you can read about in the 'Docs README' (linked below). The docstring
201+
# we've implemented here adheres to all of these requirements. Non-compliant
202+
# docstrings will, in most cases, cause test failures; however, you should
203+
# still manually ensure you're writing high-quality docstrings.
198204
#
199-
# The examples in docstrings are run as part of our tests, so you should also add
200-
# assertions to make sure things are functionally correct. In this case, we check
201-
# that the `output` we create in the code example is what we expect.
205+
# The examples in docstrings are run as part of our tests, so you should
206+
# also add assertions to make sure things are functionally correct. In this
207+
# case, we check that the `output` we create in the code example is what we
208+
# expect.
202209
"""
203-
Fills an output tensor with consecutive values starting from zero along the given dimension.
210+
Fills an output tensor with consecutive values starting from zero
211+
along the given dimension.
204212
205213
Args:
206214
shape: The desired shape.
@@ -217,12 +225,15 @@ def theta(shape: ShapeLike, dim: int = 0, dtype: datatype.dtype = datatype.float
217225
218226
output = tp.theta([3])
219227
220-
assert np.array_equal(cp.from_dlpack(output).get(), np.arange(0, 3, dtype=np.float32))
228+
assert np.array_equal(
229+
cp.from_dlpack(output).get(), np.arange(0, 3, dtype=np.float32)
230+
)
221231
"""
222232

223-
# Next we build the trace operator. The `build()` function is also responsible for constructing
224-
# the output frontend Tensors. All of the arguments that follow the inputs
225-
# are forwarded directly to the constructor of the `Trace` operator.
233+
# Next we build the trace operator. The `build()` function is also
234+
# responsible for constructing the output frontend Tensors. All of the
235+
# arguments that follow the inputs are forwarded directly to the
236+
# constructor of the `Trace` operator.
226237
return Theta.build([shape], dim, dtype)
227238

228239
```

tripy/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,7 @@ build = [
4040
"mypy==1.11.0",
4141
]
4242
doc_test_common = [
43+
"black==24.10.0",
4344
"torch==2.4.0+cu121",
4445
"numpy==1.25.0",
4546
# cupy requires NVRTC but does not specify it as a package dependency
@@ -96,5 +97,4 @@ testpaths = [
9697
addopts = "--strict-markers"
9798
markers = [
9899
"l1: Indicates that the test should only be run in nightlies.",
99-
"manual: Disables tests in automation",
100100
]

tripy/tests/README.md

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,13 +18,12 @@ You can also provide marker arguments to only run specific test cadences
1818
L0 tests, use:
1919

2020
```bash
21-
pytest tests/ -v -m "not l1 and not manual" -n 4 --dist worksteal --ignore tests/performance
22-
pytest tests/performance -v -m "not l1 and not manual"
21+
pytest tests/ -v -m "not l1" -n 4 --dist worksteal --ignore tests/performance
22+
pytest tests/performance -v -m "not l1"
2323
```
2424

25-
Note that the L0/L1 tests can be parallelized, which is not necessarily
26-
true of `manual` tests. In that case, performance tests are run separately
27-
because they must run serially to ensure accurate measurements.
25+
Note that the L0/L1 tests can be parallelized. Performance tests, however,
are run separately because they must run serially to ensure accurate measurements.
2827

2928
## Profiling
3029

@@ -36,7 +35,7 @@ tests together.
3635
For example, to profile L0 tests, run:
3736

3837
```bash
39-
pytest tests/ -v -m "not l1 and not manual" --ignore tests/performance --profile
38+
pytest tests/ -v -m "not l1" --ignore tests/performance --profile
4039
```
4140

4241
You can visualize the results using `snakeviz`.

0 commit comments

Comments
 (0)