Commit cd4f1d6

Merge pull request yt-project#4868 from neutrinoceros/sty/black2ruff

STY: migrate formatting from black to ruff-format

neutrinoceros committed Apr 29, 2024
2 parents 5dd2dff + 9394c8e
Showing 23 changed files with 90 additions and 92 deletions.
3 changes: 3 additions & 0 deletions .git-blame-ignore-revs
@@ -47,3 +47,6 @@ ec8bb45ea1603f3862041fa9e8ec274afd9bbbfd
 
 # auto upgrade typing idioms from Python 3.8 to 3.9
 4cfd370a8445abd4620e3853c2c047ee3d649fd7
+
+# migration: black -> ruff-format
+3214662dc7d53f7ac56b3589c5a27ef075f83ab4
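The new ignore-revs entry keeps this mass reformat out of `git blame` output. GitHub's blame view picks the file up automatically at this path; for local blames, git has to be pointed at it once per clone, along these lines (standard git configuration, not something this diff sets up for you):

    git config blame.ignoreRevsFile .git-blame-ignore-revs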
14 changes: 7 additions & 7 deletions .pre-commit-config.yaml
@@ -27,21 +27,21 @@ repos:
       - id: check-executables-have-shebangs
       - id: check-yaml
 
-  - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 24.3.0
-    hooks:
-      - id: black-jupyter
-
+  # TODO: replace this with ruff when it supports embedded python blocks
+  # see https://github.com/astral-sh/ruff/issues/8237
   - repo: https://github.com/adamchainz/blacken-docs
     rev: 1.16.0
     hooks:
       - id: blacken-docs
-        additional_dependencies: [black==23.9.1]
+        additional_dependencies: [black==24.3.0]
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.4
+    rev: v0.4.0
     hooks:
+      - id: ruff-format
+        types_or: [ python, pyi, jupyter ]
       - id: ruff
         types_or: [ python, pyi, jupyter ]
         args: [--fix, "--show-fixes"]
 
   - repo: https://github.com/pre-commit/pygrep-hooks
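After this change the ruff-format hook owns formatting (replacing black-jupyter) and the ruff hook owns linting with autofixes. Outside pre-commit, the rough command-line equivalents would be (a sketch, assuming the ruff v0.4.0 pinned above):

    ruff format .          # formatting, in place of `black .`
    ruff check --fix .     # lint + autofix, matching the hook's args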
1 change: 0 additions & 1 deletion README.md
@@ -13,7 +13,6 @@
 [![Build and Test](https://github.com/yt-project/yt/actions/workflows/build-test.yaml/badge.svg)](https://github.com/yt-project/yt/actions/workflows/build-test.yaml)
 [![CI (bleeding edge)](https://github.com/yt-project/yt/actions/workflows/bleeding-edge.yaml/badge.svg)](https://github.com/yt-project/yt/actions/workflows/bleeding-edge.yaml)
 [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/yt-project/yt/main.svg)](https://results.pre-commit.ci/latest/github/yt-project/yt/main)
-[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
 [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json)](https://github.com/charliermarsh/ruff)
 
 <!--- [![codecov](https://codecov.io/gh/yt-project/yt/branch/main/graph/badge.svg)](https://codecov.io/gh/yt-project/yt) --->
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -244,10 +244,9 @@ zip-safe = false
 [tool.setuptools.packages.find]
 namespaces = false
 
-
 [tool.black]
-line-length = 88
-target-version = ['py39']
+# TODO: drop this section when ruff supports embedded python blocks
+# see https://github.com/astral-sh/ruff/issues/8237
 include = '\.pyi?$'
 exclude = '''
 /(
@@ -280,6 +279,7 @@ exclude = [
     "yt/utilities/lodgeit.py",
     "yt/mods.py",
     "yt/visualization/_colormap_data.py",
+    "yt/exthook.py",
 ]
 
 [tool.ruff.lint]
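The [tool.black] table survives only because blacken-docs still reads it; line-length and target-version can be dropped because ruff-format's defaults already line up (88 columns, with the Python target inferred from requires-python). If the project ever wanted those knobs pinned explicitly on the ruff side, the equivalent would be roughly this sketch (real ruff keys, but setting them explicitly is my assumption, not part of this diff):

    [tool.ruff]
    line-length = 88
    target-version = "py39"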
2 changes: 1 addition & 1 deletion tests/report_failed_answers.py
@@ -402,7 +402,7 @@ def handle_error(error, testcase, missing_errors, missing_answers, failed_answer
     COLOR_PURPLE = "\x1b[35;1m"
     COLOR_CYAN = "\x1b[36;1m"
     COLOR_RESET = "\x1b[0m"
-    FLAG_EMOJI = " \U0001F6A9 "
+    FLAG_EMOJI = " \U0001f6a9 "
 
     failed_answers = missing_answers = None
     if args.upload_failed_tests or args.upload_missing_answers:
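The only delta here is the case of the hex digits: ruff-format normalizes long \U escape sequences to lowercase, which cannot change the resulting string. A standalone check (mine, not from the diff):

    # both spellings denote U+1F6A9 (triangular flag); only the source text differs
    assert "\U0001F6A9" == "\U0001f6a9"
    assert ord("\U0001f6a9") == 0x1F6A9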
9 changes: 3 additions & 6 deletions yt/data_objects/construction_data_containers.py
@@ -1136,22 +1136,19 @@ def _generate_container_field(self, field):
             np.multiply(rv, self.dds[2], rv)
         elif field == ("index", axis_name[0]):
             x = np.mgrid[
-                self.left_edge[0]
-                + 0.5 * self.dds[0] : self.right_edge[0]
+                self.left_edge[0] + 0.5 * self.dds[0] : self.right_edge[0]
                 - 0.5 * self.dds[0] : self.ActiveDimensions[0] * 1j
             ]
             np.multiply(rv, x[:, None, None], rv)
         elif field == ("index", axis_name[1]):
             y = np.mgrid[
-                self.left_edge[1]
-                + 0.5 * self.dds[1] : self.right_edge[1]
+                self.left_edge[1] + 0.5 * self.dds[1] : self.right_edge[1]
                 - 0.5 * self.dds[1] : self.ActiveDimensions[1] * 1j
             ]
             np.multiply(rv, y[None, :, None], rv)
         elif field == ("index", axis_name[2]):
             z = np.mgrid[
-                self.left_edge[2]
-                + 0.5 * self.dds[2] : self.right_edge[2]
+                self.left_edge[2] + 0.5 * self.dds[2] : self.right_edge[2]
                 - 0.5 * self.dds[2] : self.ActiveDimensions[2] * 1j
             ]
             np.multiply(rv, z[None, None, :], rv)
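The three mgrid hunks are one rule applied three times: ruff-format now keeps `self.left_edge[i] + 0.5 * self.dds[i]` on a single line before the slice colon, and the colons stay padded with spaces because the slice operands are compound expressions. The same rule on a toy grid (my example, not yt code):

    import numpy as np

    left, right, n = 0.0, 1.0, 8
    dx = (right - left) / n
    # padded colons around compound operands; the complex step means "n samples"
    x = np.mgrid[left + 0.5 * dx : right - 0.5 * dx : n * 1j]
    assert len(x) == n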
6 changes: 3 additions & 3 deletions yt/data_objects/level_sets/clump_handling.py
@@ -334,9 +334,9 @@ def save_as_dataset(self, filename=None, fields=None):
                         np.int64
                     )
                     ftypes[cfield] = ptype
-                    field_data[cfield][
-                        clump.data._part_ind(ptype)
-                    ] = clump.contour_id
+                    field_data[cfield][clump.data._part_ind(ptype)] = (
+                        clump.contour_id
+                    )
 
         if need_grid_positions:
             for ax in "xyz":
6 changes: 3 additions & 3 deletions yt/data_objects/profiles.py
@@ -131,9 +131,9 @@ def _finalize_storage(self, fields, temp_storage):
 
         for i, _field in enumerate(fields):
             # q values are returned as q * weight but we want just q
-            temp_storage.qvalues[..., i][
-                temp_storage.used
-            ] /= temp_storage.weight_values[temp_storage.used]
+            temp_storage.qvalues[..., i][temp_storage.used] /= (
+                temp_storage.weight_values[temp_storage.used]
+            )
 
         # get the profile data from all procs
         all_store = {self.comm.rank: temp_storage}
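This hunk and the clump_handling one above show the same new wrapping strategy: when an assignment or augmented assignment overflows the line, the formatter parenthesizes the right-hand side instead of splitting the subscript on the left. Semantics are untouched; a toy version with hypothetical names:

    field_data = {"contour_field": [0, 0, 0, 0]}
    contour_id = 7
    # the parentheses around the right-hand side are purely cosmetic
    field_data["contour_field"][1] = (
        contour_id
    )
    assert field_data["contour_field"] == [0, 7, 0, 0]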
6 changes: 3 additions & 3 deletions yt/data_objects/tests/test_dataset_access.py
@@ -46,12 +46,12 @@ def test_region_from_d():
     assert_equal(reg1["gas", "density"], reg2["gas", "density"])
 
     # Now, string units in some -- 1.0 == cm
-    reg1 = ds.r[(0.1, "cm"):(0.5, "cm"), :, (0.25, "cm"):(0.35, "cm")]
+    reg1 = ds.r[(0.1, "cm") : (0.5, "cm"), :, (0.25, "cm") : (0.35, "cm")]
     reg2 = ds.region([0.3, 0.5, 0.3], [0.1, 0.0, 0.25], [0.5, 1.0, 0.35])
     assert_equal(reg1["gas", "density"], reg2["gas", "density"])
 
     # Now, string units in some -- 1.0 == cm
-    reg1 = ds.r[(0.1, "cm"):(0.5, "cm"), :, 0.25:0.35]
+    reg1 = ds.r[(0.1, "cm") : (0.5, "cm"), :, 0.25:0.35]
     reg2 = ds.region([0.3, 0.5, 0.3], [0.1, 0.0, 0.25], [0.5, 1.0, 0.35])
     assert_equal(reg1["gas", "density"], reg2["gas", "density"])
 
@@ -127,7 +127,7 @@ def test_point_from_r():
 
 def test_ray_from_r():
     ds = fake_amr_ds(fields=["density"], units=["g/cm**3"])
-    ray1 = ds.r[(0.1, 0.2, 0.3):(0.4, 0.5, 0.6)]
+    ray1 = ds.r[(0.1, 0.2, 0.3) : (0.4, 0.5, 0.6)]
     ray2 = ds.ray((0.1, 0.2, 0.3), (0.4, 0.5, 0.6))
     assert_equal(ray1["gas", "density"], ray2["gas", "density"])
 
2 changes: 1 addition & 1 deletion yt/frontends/chimera/io.py
@@ -1,5 +1,5 @@
 """
-Chimera-specific IO functions
+Chimera-specific IO functions
6 changes: 2 additions & 4 deletions yt/frontends/chombo/data_structures.py
@@ -511,8 +511,7 @@ def _parse_parameter_file(self):
         domain_right_edge = np.zeros(self.dimensionality)
         for il, ll in enumerate(
             lines[
-                lines.index("[Grid]")
-                + 2 : lines.index("[Grid]")
+                lines.index("[Grid]") + 2 : lines.index("[Grid]")
                 + 2
                 + self.dimensionality
             ]
@@ -522,8 +521,7 @@
         self._periodicity = [0] * 3
         for il, ll in enumerate(
             lines[
-                lines.index("[Boundary]")
-                + 2 : lines.index("[Boundary]")
+                lines.index("[Boundary]") + 2 : lines.index("[Boundary]")
                 + 2
                 + 6 : 2
             ]
5 changes: 2 additions & 3 deletions yt/frontends/enzo/data_structures.py
@@ -1017,9 +1017,8 @@ def __init__(self, parameter_override=None, conversion_override=None):
 
     def _parse_parameter_file(self):
         enzo = self._obtain_enzo()
-        self._input_filename = "cycle%08i" % (
-            enzo.yt_parameter_file["NumberOfPythonCalls"]
-        )
+        ncalls = enzo.yt_parameter_file["NumberOfPythonCalls"]
+        self._input_filename = f"cycle{ncalls:08d}"
        self.parameters["CurrentTimeIdentifier"] = time.time()
         self.parameters.update(enzo.yt_parameter_file)
         self.conversion_factors.update(enzo.conversion_factors)
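This one is a readability cleanup riding along with the migration rather than formatter output: the %-style `%08i` and the f-string spec `:08d` zero-pad identically. A standalone check:

    ncalls = 42
    assert "cycle%08i" % ncalls == f"cycle{ncalls:08d}" == "cycle00000042"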
3 changes: 1 addition & 2 deletions yt/frontends/enzo/simulation_handling.py
@@ -264,8 +264,7 @@ def get_time_series(
             my_outputs = my_all_outputs[
                 int(
                     np.ceil(float(initial_cycle) / self.parameters["CycleSkipDataDump"])
-                ) : (final_cycle / self.parameters["CycleSkipDataDump"])
-                + 1
+                ) : (final_cycle / self.parameters["CycleSkipDataDump"]) + 1
             ]
 
         else:
35 changes: 12 additions & 23 deletions yt/frontends/gamer/data_structures.py
@@ -143,42 +143,31 @@ def _validate_parent_children_relationship(self):
         for grid in self.grids:
             # parent->children == itself
             if grid.Parent is not None:
-                assert (
-                    grid in grid.Parent.Children
-                ), "Grid %d, Parent %d, Parent->Children[0] %d" % (
-                    grid.id,
-                    grid.Parent.id,
-                    grid.Parent.Children[0].id,
+                assert grid in grid.Parent.Children, (
+                    f"Grid {grid.id}, Parent {grid.Parent.id}, "
+                    f"Parent->Children[0] {grid.Parent.Children[0].id}"
                 )
 
             # children->parent == itself
             for c in grid.Children:
-                assert c.Parent is grid, "Grid %d, Children %d, Children->Parent %d" % (
-                    grid.id,
-                    c.id,
-                    c.Parent.id,
+                assert c.Parent is grid, (
+                    f"Grid {grid.id}, Children {c.id}, "
+                    f"Children->Parent {c.Parent.id}"
                 )
 
             # all refinement grids should have parent
             if grid.Level > 0:
-                assert (
-                    grid.Parent is not None and grid.Parent.id >= 0
-                ), "Grid %d, Level %d, Parent %d" % (
-                    grid.id,
-                    grid.Level,
-                    grid.Parent.id if grid.Parent is not None else -999,
+                assert grid.Parent is not None and grid.Parent.id >= 0, (
+                    f"Grid {grid.id}, Level {grid.Level}, "
+                    f"Parent {grid.Parent.id if grid.Parent is not None else -999}"
                 )
 
             # parent index is consistent with the loaded dataset
             if grid.Level > 0:
                 father_gid = father_list[grid.id * self.pgroup] // self.pgroup
-                assert (
-                    father_gid == grid.Parent.id
-                ), "Grid %d, Level %d, Parent_Found %d, Parent_Expect %d" % (
-                    grid.id,
-                    grid.Level,
-                    grid.Parent.id,
-                    father_gid,
+                assert father_gid == grid.Parent.id, (
+                    f"Grid {grid.id}, Level {grid.Level}, "
+                    f"Parent_Found {grid.Parent.id}, Parent_Expect {father_gid}"
                 )
 
             # edges between children and parent
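Beyond reflowing the asserts so the message sits in parentheses, this hunk trades %-interpolation for implicitly concatenated f-strings. The two spellings render identically; a toy check with made-up ids:

    grid_id, parent_id, child0_id = 12, 4, 12
    old = "Grid %d, Parent %d, Parent->Children[0] %d" % (grid_id, parent_id, child0_id)
    new = (
        f"Grid {grid_id}, Parent {parent_id}, "
        f"Parent->Children[0] {child0_id}"
    )
    assert old == new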
3 changes: 2 additions & 1 deletion yt/frontends/ramses/data_structures.py
@@ -629,7 +629,8 @@ def max_level(self):
     @cached_property
     def num_grids(self):
         return sum(
-            dom.local_oct_count for dom in self.domains  # + dom.ngridbound.sum()
+            dom.local_oct_count
+            for dom in self.domains  # + dom.ngridbound.sum()
         )
 
     def _detect_output_fields(self):
14 changes: 9 additions & 5 deletions yt/frontends/ramses/io.py
@@ -231,11 +231,15 @@ def _read_particle_coords(self, chunks, ptf):
             for subset in chunk.objs:
                 rv = self._read_particle_subset(subset, fields)
                 for ptype in sorted(ptf):
-                    yield ptype, (
-                        rv[ptype, pn % "x"],
-                        rv[ptype, pn % "y"],
-                        rv[ptype, pn % "z"],
-                    ), 0.0
+                    yield (
+                        ptype,
+                        (
+                            rv[ptype, pn % "x"],
+                            rv[ptype, pn % "y"],
+                            rv[ptype, pn % "z"],
+                        ),
+                        0.0,
+                    )
 
     def _read_particle_fields(self, chunks, ptf, selector):
         pn = "particle_position_%s"
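The same reshaping repeats in the sdf and stream frontends below: a bare `yield a, (x, y, z), 0.0` becomes one explicitly parenthesized three-tuple. The yielded value is identical either way; a self-contained check (toy data, not the RAMSES reader):

    def old_style():
        yield "ptype", (1.0, 2.0, 3.0), 0.0

    def new_style():
        yield (
            "ptype",
            (1.0, 2.0, 3.0),
            0.0,
        )

    assert next(old_style()) == next(new_style())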
4 changes: 1 addition & 3 deletions yt/frontends/ramses/tests/test_outputs.py
@@ -318,9 +318,7 @@ def test_ramses_part_count():
 @requires_file(ramsesCosmo)
 def test_custom_particle_def():
     ytcfg.add_section("ramses-particles")
-    ytcfg[
-        "ramses-particles", "fields"
-    ] = """particle_position_x, d
+    ytcfg["ramses-particles", "fields"] = """particle_position_x, d
     particle_position_y, d
     particle_position_z, d
     particle_velocity_x, d
28 changes: 18 additions & 10 deletions yt/frontends/sdf/io.py
@@ -20,11 +20,15 @@ def _read_particle_coords(self, chunks, ptf):
         data_files = self._get_data_files(chunks)
         assert len(data_files) == 1
         for _data_file in sorted(data_files, key=lambda x: (x.filename, x.start)):
-            yield "dark_matter", (
-                self._handle["x"],
-                self._handle["y"],
-                self._handle["z"],
-            ), 0.0
+            yield (
+                "dark_matter",
+                (
+                    self._handle["x"],
+                    self._handle["y"],
+                    self._handle["z"],
+                ),
+                0.0,
+            )
 
     def _read_particle_fields(self, chunks, ptf, selector):
         assert len(ptf) == 1
@@ -78,11 +82,15 @@ def _read_particle_coords(self, chunks, ptf):
         assert len(data_files) == 1
         for _data_file in data_files:
             pcount = self._handle["x"].size
-            yield "dark_matter", (
-                self._handle["x"][:pcount],
-                self._handle["y"][:pcount],
-                self._handle["z"][:pcount],
-            ), 0.0
+            yield (
+                "dark_matter",
+                (
+                    self._handle["x"][:pcount],
+                    self._handle["y"][:pcount],
+                    self._handle["z"][:pcount],
+                ),
+                0.0,
+            )
 
     def _read_particle_fields(self, chunks, ptf, selector):
         chunks = list(chunks)
14 changes: 9 additions & 5 deletions yt/frontends/stream/io.py
@@ -108,11 +108,15 @@ def _read_particle_coords(self, chunks, ptf):
             f = self.fields[data_file.filename]
             # This double-reads
             for ptype in sorted(ptf):
-                yield ptype, (
-                    f[ptype, "particle_position_x"],
-                    f[ptype, "particle_position_y"],
-                    f[ptype, "particle_position_z"],
-                ), 0.0
+                yield (
+                    ptype,
+                    (
+                        f[ptype, "particle_position_x"],
+                        f[ptype, "particle_position_y"],
+                        f[ptype, "particle_position_z"],
+                    ),
+                    0.0,
+                )
 
     def _read_smoothing_length(self, chunks, ptf, ptype):
         for data_file in self._sorted_chunk_iterator(chunks):
8 changes: 4 additions & 4 deletions yt/funcs.py
@@ -220,8 +220,7 @@ def rootonly(func):
     .. code-block:: python
 
         @rootonly
-        def some_root_only_function(*args, **kwargs):
-            ...
+        def some_root_only_function(*args, **kwargs): ...
     """
 
     @wraps(func)
@@ -1090,8 +1089,9 @@ def array_like_field(data, x, field):
 
 def validate_3d_array(obj):
     if not is_sequence(obj) or len(obj) != 3:
         raise TypeError(
-            "Expected an array of size (3,), received '{}' of "
-            "length {}".format(str(type(obj)).split("'")[1], len(obj))
+            "Expected an array of size (3,), received '{}' of length {}".format(
+                str(type(obj)).split("'")[1], len(obj)
+            )
         )


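Two rules meet in funcs.py: stub bodies collapse onto the `def` line (here only inside a docstring example), and the TypeError message becomes a single literal now that it fits. Since adjacent string literals are concatenated at compile time, before .format runs, the message is byte-for-byte the same; for instance:

    old = (
        "Expected an array of size (3,), received '{}' of "
        "length {}".format("list", 4)
    )
    new = "Expected an array of size (3,), received '{}' of length {}".format("list", 4)
    assert old == new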
@@ -74,7 +74,6 @@ def test_geographic_coordinates(geometry):
 
 @pytest.mark.parametrize("geometry", ("geographic", "internal_geographic"))
 def test_geographic_conversions(geometry):
-
     ds = fake_amr_ds(geometry=geometry)
     ad = ds.all_data()
     lats = ad["index", "latitude"]
2 changes: 1 addition & 1 deletion yt/utilities/answer_testing/framework.py
@@ -613,7 +613,7 @@ def compare(self, new_result, old_result):
             assert_equal(nres, ores, err_msg=err_msg)
         else:
             assert_allclose_units(
-                nres, ores, 10.0 ** -(self.decimals), err_msg=err_msg
+                nres, ores, 10.0**-(self.decimals), err_msg=err_msg
             )


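The final hunk is the familiar power-operator rule, which ruff-format keeps from black: `**` gets no surrounding spaces when both operands are simple. The spacing is purely cosmetic:

    decimals = 10
    assert 10.0**-(decimals) == 10.0 ** -decimals  # same value either way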
