Merge pull request #454 from tataratat/develop
j042 authored Aug 22, 2024
2 parents 2bcf806 + 8ff127e commit 7cbec91
Showing 9 changed files with 117 additions and 71 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -35,7 +35,7 @@ repos:
additional_dependencies: [tomli]

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.6
rev: v0.6.1
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
161 changes: 100 additions & 61 deletions docs/source/handle_markdown.py
@@ -11,6 +11,7 @@
Author: Clemens Fricke
"""

import argparse
import os
import pathlib
import re
@@ -131,69 +132,81 @@ def process_file(
Returns:
str: Content of the markdown file. Only if return_content is True.
"""
content = ""
# read in the content of the markdown file
with open(file) as f:
content = f.read()
os.chdir(os.path.dirname(file)) if os.path.dirname(file) else None
# get all links from the markdown file
links = get_markdown_links(content)

for item in links:
if not item[1].strip():
warnings.warn(
f"Empty link in `{file}`. Link name `{item[0]}` link path "
f"`{item[1]}`. Will ignore link.",
stacklevel=3,
)
continue
if item[1].startswith(("http", "#")): # skip http links and anchors
if "badge" in item[1]:
continue
content = content.replace(
f"[{item[0]}]({item[1]})",
f"<a href='{item[1]}'>{item[0]}</a>",
)
continue
elif item[1] in link_substitutions:
if relative_links:
content = content.replace(
f"[{item[0]}]({item[1]})",
f"<a href='{link_substitutions[item[1]]}'>{item[0]}</a>",
)
continue
else:
content = content.replace(
f"[{item[0]}]({item[1]})",
"See documentation for examples.",
)
elif not relative_links: # generate links to github repo
new_path = get_github_path_from(pathlib.Path(item[1]).resolve())
else: # generate relative links
common_sub_path, steps_back = get_common_parent(
item[1], folder_to_save_to
)
new_path = "../" * steps_back + str(
pathlib.Path(item[1]).resolve().relative_to(common_sub_path)
)
content = content.replace(item[1], str(new_path))

# super special links (just special images) that the sphinx markdown
# parser won't correctly handle since they are in html tags.
special_links = get_special_links(content)
for item in special_links:
if not item[0].strip():
warnings.warn(
f"Empty link in `{file}`. Link name `{item[1]}` link path "
f"`{item[0]}`. Will ignore link.",
stacklevel=3,
)
continue
if item[0].startswith("http"): # skip http links and anchors
continue
else:
# just link to static folder in docs
new_path = "_static/" + str(pathlib.Path(item[1]).name)
content = content.replace(item[1], str(new_path))
# content = f.read()
os.chdir(os.path.dirname(file)) if os.path.dirname(file) else None
for line in f:
# get all links from the markdown file
links = get_markdown_links(line)
for item in links:
if not item[1].strip():
warnings.warn(
f"Empty link in `{file}`. Link name `{item[0]}` link path "
f"`{item[1]}`. Will ignore link.",
stacklevel=3,
)
continue
if item[1].startswith(
("http", "#")
): # skip http links and anchors
if "badge" in item[1]:
continue
line = line.replace( # noqa: PLW2901
f"[{item[0]}]({item[1]})",
f"<a href='{item[1]}'>{item[0]}</a>",
)
continue
elif item[1] in link_substitutions:
if relative_links:
line = line.replace( # noqa: PLW2901
f"[{item[0]}]({item[1]})",
f"<a href='{link_substitutions[item[1]]}'>{item[0]}</a>",
)
continue
else:
line = line.replace( # noqa: PLW2901
f"[{item[0]}]({item[1]})",
"See documentation for examples.",
)
elif not relative_links: # generate links to github repo
new_path = get_github_path_from(
pathlib.Path(item[1]).resolve()
)
else: # generate relative links
common_sub_path, steps_back = get_common_parent(
item[1], folder_to_save_to
)
new_path = "../" * steps_back + str(
pathlib.Path(item[1])
.resolve()
.relative_to(common_sub_path)
)
line = line.replace(item[1], str(new_path)) # noqa: PLW2901

# super special links (just special images) that the sphinx markdown
# parser won't correctly handle since they are in html tags.
special_links = get_special_links(line)
for item in special_links:
if not item[0].strip():
warnings.warn(
f"Empty link in `{file}`. Link name `{item[1]}` link path "
f"`{item[0]}`. Will ignore link.",
stacklevel=3,
)
continue
if item[0].startswith("http"): # skip http links and anchors
continue
elif not relative_links: # generate links to github repo
new_path = get_github_path_from(
pathlib.Path(item[1]).resolve()
)
else:
# just link to static folder in docs
new_path = "_static/" + str(pathlib.Path(item[1]).name)
line = line.replace(item[1], str(new_path)) # noqa: PLW2901
content += f"{line}"

os.chdir(original_cwd)

@@ -206,7 +219,33 @@ def process_file(
f.write(content)


def prepare_file_for_PyPI():
"""Prepare the README file for PyPI.
This function will prepare the README file for PyPI. It will replace all
relative links with absolute links to the github repository.
Args:
file (str): Path to the README file.
"""
content = process_file(
"README.md", relative_links=False, return_content=True
)
with open("README.md", "w") as f:
f.write(content)


if __name__ == "__main__":
# python handle_markdown -> docs
# python handle_markdown -b -> PyPI
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--build", action="store_true")
args = parser.parse_args()
if args.build:
print("Preparing README for PyPI.") # noqa: T201
prepare_file_for_PyPI()
exit() # noqa: PLR1722
print("Processing markdown files.") # noqa: T201
os.chdir(repo_root)
os.makedirs(folder_to_save_to, exist_ok=True)
# Process all markdown files
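The rewritten `process_file` now walks the file line by line but applies the same substitution rules as before: http/anchor links become `<a>` tags unless they are badges, and other targets are rewritten to repository URLs or relative paths. A minimal, self-contained sketch of that rule set, with a hypothetical `repo_url` and example path, and without the module's real `get_github_path_from`/`get_common_parent` helpers:

```python
import re

# Matches markdown links of the form [name](target).
LINK_RE = re.compile(r"\[([^\]]+)\]\(([^)]+)\)")


def rewrite_line(line, repo_url="https://github.com/tataratat/splinepy/blob/main"):
    """Illustrative only: http links become <a> tags, other targets become repo URLs."""
    for name, target in LINK_RE.findall(line):
        if not target.strip():
            continue  # empty targets are skipped (the real script also warns)
        if target.startswith(("http", "#")):
            if "badge" in target:
                continue  # badges stay as plain markdown
            line = line.replace(
                f"[{name}]({target})", f"<a href='{target}'>{name}</a>"
            )
        else:
            line = line.replace(target, f"{repo_url}/{target}")
    return line


print(rewrite_line("See [intro](docs/intro.md) or [CI](https://example.org/run)."))
```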
3 changes: 2 additions & 1 deletion examples/ipynb/notebook_showcase_k3d.ipynb
@@ -15,9 +15,10 @@
"metadata": {},
"outputs": [],
"source": [
"import splinepy\n",
"import vedo\n",
"\n",
"import splinepy\n",
"\n",
"vedo.settings.default_backend = \"k3d\""
]
},
1 change: 1 addition & 0 deletions pyproject.toml
@@ -60,6 +60,7 @@ input = "splinepy/_version.py"
[tool.cibuildwheel]
test-extras = ["test"]
test-command = "pytest {project}/tests"
before_all = "python docs/source/handle_markdown.py -b"

[tool.cibuildwheel.macos]
archs = ["x86_64", "arm64"]
2 changes: 1 addition & 1 deletion splinepy/_version.py
@@ -1 +1 @@
__version__ = "0.1.2"
__version__ = "0.1.3"
2 changes: 1 addition & 1 deletion src/proximity/proximity.cpp
@@ -427,7 +427,7 @@ void Proximity::Newton(SearchData& data) const {
}

void Proximity::PrepareIterationLevenbergMarquart(SearchData& data) const {
// LM requires Jac (pronounced similar to Jacques)
// LM requires Jacobian
ComputeCostAndDerivatives(data, 1);

// prepare rhs
4 changes: 3 additions & 1 deletion src/py/py_knot_vector.cpp
@@ -170,7 +170,9 @@ void init_knot_vector(py::module_& m) {
"Returns copy of knot vectors as numpy array.")
.def(
"__array__",
[](const KnotVector& kv, [[maybe_unused]] py::args dtype_ignored) {
[](const KnotVector& kv,
[[maybe_unused]] py::args dtype_ignored,
[[maybe_unused]] py::kwargs copy_ignored) {
py::array_t<KnotType> arr(kv.GetSize());
KnotType* arr_ptr = static_cast<KnotType*>(arr.request().ptr);
for (int i{}; i < kv.GetSize(); ++i) {
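The extra `py::kwargs` on the `__array__` binding matches the protocol shape that newer NumPy releases may use, where `__array__` can receive a `copy` keyword in addition to `dtype`. A rough Python-side analogue of that accepted-but-ignored signature (illustrative class, not the actual `KnotVector` binding):

```python
import numpy as np


class KnotVectorLike:
    """Illustrative only: an object exposing the __array__ protocol."""

    def __init__(self, knots):
        self._knots = list(knots)

    def __array__(self, dtype=None, copy=None):
        # NumPy 2.x may pass copy=...; older callers pass dtype only (or nothing).
        # Both are accepted here and copy is effectively ignored, as in the binding above.
        return np.array(self._knots, dtype=dtype)


print(np.asarray(KnotVectorLike([0.0, 0.0, 0.5, 1.0, 1.0])))
```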
11 changes: 7 additions & 4 deletions tests/test_proximity.py
@@ -2,21 +2,24 @@
import pytest

# fixtures used
all_2p2d_splines = (
all_splines = (
"rational_bezier_2p2d",
"bezier_2p2d",
"bspline_2p2d",
"nurbs_2p2d",
"rational_bezier_3p3d",
"bezier_3p3d",
"bspline_3p3d",
"nurbs_3p3d",
)


@pytest.mark.parametrize("splinetype", all_2p2d_splines)
@pytest.mark.parametrize("splinetype", all_splines)
def test_queries_inside_spline_initial_guess_with_kdt(
request, splinetype, np_rng
):
"""
Initial guess made with kdt. Mid-point as initial guess tends to fail,
so excluded from test.
Initial guess made with kdt.
"""
spline = request.getfixturevalue(splinetype)
# form parametric queries
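The test parametrizes over fixture *names* and resolves each one at run time through pytest's built-in `request` fixture. A generic, self-contained illustration of the same pattern (hypothetical fixtures, not the spline fixtures above):

```python
import pytest


@pytest.fixture
def unit_interval():
    return (0.0, 1.0)


@pytest.fixture
def unit_square():
    return ((0.0, 0.0), (1.0, 1.0))


# Parametrize over fixture names, then look each one up via `request`.
@pytest.mark.parametrize("domain_fixture", ["unit_interval", "unit_square"])
def test_domain_bounds(request, domain_fixture):
    domain = request.getfixturevalue(domain_fixture)
    assert len(domain) == 2
```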
2 changes: 1 addition & 1 deletion third_party/pybind11
Submodule pybind11 updated 157 files
