5 changes: 4 additions & 1 deletion .github/workflows/ci.yml
@@ -36,4 +36,7 @@ jobs:
       run: |
         uv sync
         uv run pre-commit run --all
-        uv run pytest
+        uv run pytest ./tests
+        uv run mypy ./src
+        uv run ruff check
+        uv run ruff format --check
13 changes: 13 additions & 0 deletions pyproject.toml
@@ -113,3 +113,16 @@ build-backend = "hatchling.build"
 
 [tool.hatch.version]
 path = "src/bibx/__init__.py"
+
+[tool.tox]
+requires = ["tox>=4.24.2"]
+env_list = ["3.9", "3.10", "3.11", "3.12", "3.13"]
+
+[tool.tox.env_run_base]
+deps = [
+    "pytest>=8.3.5",
+    "ruff>=0.9.10",
+    "mypy~=1.9.0",
+    "types-requests>=2.32.0.20241016",
+]
+commands = [["ruff", "check"], ["pytest", "tests/"], ["mypy", "src/"]]
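Reading of the new [tool.tox] tables, hedged since it only follows from tox 4's native TOML support (required here via tox>=4.24.2): env_list declares one test environment per listed interpreter, env_run_base supplies the deps installed into each of them, and commands runs ruff check, pytest tests/, and mypy src/ in every environment, mirroring the checks the CI workflow above now runs.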
14 changes: 8 additions & 6 deletions src/bibx/builders/openalex.py
@@ -94,6 +94,12 @@ def _extract_doi(url: str) -> str:
 
     @classmethod
     def _work_to_article(cls, work: Work) -> Article:
+        journal = None
+        if work.primary_location and work.primary_location.source:
+            journal = work.primary_location.source.display_name
+        permalink = None
+        if work.primary_location and work.primary_location.landing_page_url:
+            permalink = work.primary_location.landing_page_url
         article = Article(
             label=work.id,
             ids={
@@ -105,16 +111,12 @@ def _work_to_article(cls, work: Work) -> Article:
             authors=[cls._invert_name(a.author.display_name) for a in work.authorships],
             year=work.publication_year,
             title=work.title,
-            journal=(
-                work.primary_location
-                and work.primary_location.source
-                and work.primary_location.source.display_name
-            ),
+            journal=journal,
             volume=work.biblio.volume,
             issue=work.biblio.issue,
             page=work.biblio.first_page,
             doi=cls._extract_doi(work.doi) if work.doi else None,
-            _permalink=work.primary_location and work.primary_location.landing_page_url,
+            _permalink=permalink,
             times_cited=work.cited_by_count,
             references=[cls._reference_to_article(r) for r in work.referenced_works],
             keywords=[k.display_name for k in work.keywords],
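Worth spelling out why the journal/permalink refactor above helps. A chained `and` expression evaluates to its first falsy operand or its last operand, so its inferred type is a union of the chained operand types rather than a plain `str | None`; presumably that is what mypy flagged once it joined CI. A minimal sketch with simplified stand-in classes (not the real OpenAlex models):

class Source:
    display_name = "Nature"

class Location:
    def __init__(self, source: "Source | None") -> None:
        self.source = source

primary_location: "Location | None" = Location(source=None)

# Old pattern: the chain short-circuits on the first falsy operand, so the
# result is not guaranteed to be a str or None as far as the type checker sees.
journal_old = (
    primary_location
    and primary_location.source
    and primary_location.source.display_name
)

# New pattern: journal is plainly str | None.
journal: "str | None" = None
if primary_location and primary_location.source:
    journal = primary_location.source.display_name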
4 changes: 2 additions & 2 deletions src/bibx/builders/scopus_csv.py
@@ -91,11 +91,11 @@ def build(self) -> Collection:
         articles = self._articles_from_files()
         return Collection(articles=Collection.deduplicate_articles(list(articles)))
 
-    def _articles_from_files(self) -> Generator[Article]:
+    def _articles_from_files(self) -> Generator[Article, None, None]:
         for file in self._files:
             yield from self._parse_file(file)
 
-    def _parse_file(self, file: TextIO) -> Generator[Article]:
+    def _parse_file(self, file: TextIO) -> Generator[Article, None, None]:
         reader = csv.DictReader(file)
         for row in reader:
             datum = Row.model_validate(row)
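For context on the Generator annotations above (inferring the motivation, so take this as a hedged note): the send and return parameters of Generator only gained defaults with PEP 696, so the mypy ~1.9 pinned for tox still expects all three type arguments, and spelling them out keeps the signatures checkable on every targeted Python. A standalone sketch of the full form, not taken from bibx:

from collections.abc import Generator

def numbered_lines(text: str) -> Generator[str, None, None]:
    # Yields str, accepts nothing via send(), and returns None.
    for number, line in enumerate(text.splitlines(), start=1):
        yield f"{number}: {line}"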
2 changes: 1 addition & 1 deletion src/bibx/builders/wos.py
@@ -369,7 +369,7 @@ def _parse(cls, key: str, value: list[str]) -> dict:
         if key in cls.FIELDS:
             field = cls.FIELDS[key]
             parsed_value = field.parse(value)
-            return {new_key: parsed_value for new_key in [field.key, *field.aliases]}
+            return dict.fromkeys([field.key, *field.aliases], parsed_value)
 
         logger.debug("Found an unknown field with key %s and value %s", key, value)
         return {key: _ident(value)}
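The dict.fromkeys rewrites here and in collection.py below are behaviour-preserving: when a comprehension maps every key to one and the same value, dict.fromkeys builds an identical mapping, and both forms share that single value object across keys. This looks like the pattern ruff's flake8-comprehensions rules point out, though that motivation is an inference. A tiny check with made-up values:

parsed_value = ["some", "parsed", "field"]
keys = ["CR", "references"]

# Same mapping either way; both reuse the single parsed_value object.
assert {key: parsed_value for key in keys} == dict.fromkeys(keys, parsed_value)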
2 changes: 1 addition & 1 deletion src/bibx/collection.py
@@ -74,7 +74,7 @@ def _uniqe_articles_by_id(cls, articles: list[Article]) -> dict[str, Article]:
                 articles.append(article)
                 visited.add(id(article))
             merged = reduce(Article.merge, articles)
-            article_by_id.update({id_: merged for id_ in ids})
+            article_by_id.update(dict.fromkeys(ids, merged))
 
         return article_by_id
 
2 changes: 2 additions & 0 deletions stubs/bibtexparser/bparser.pyi
@@ -0,0 +1,2 @@
+class BibDatabase:
+    entries: list[dict]
Empty file.
Empty file.
Empty file.
6 changes: 6 additions & 0 deletions stubs/networkx/classes/reportviews.pyi
@@ -0,0 +1,6 @@
+from collections.abc import Iterator, Mapping, Set
+
+class NodeView(Mapping, Set):
+    def __getitem__(self, key: str, /) -> dict: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __len__(self) -> int: ...
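A hedged usage sketch of what the NodeView stub buys, assuming mypy is pointed at the stubs/ directory (for example through mypy_path, which is an assumption, not something shown in this diff); the graph below is illustrative rather than bibx code. The stub lets mypy treat graph.nodes as a mapping from string labels to attribute dicts, which presumably matches how bibx labels its nodes:

import networkx as nx

graph = nx.DiGraph()
graph.add_node("smith2020", year=2020, times_cited=12)

attributes: dict = graph.nodes["smith2020"]  # NodeView.__getitem__ -> dict
labels: list[str] = list(graph.nodes)        # NodeView.__iter__ -> Iterator[str]
node_count: int = len(graph.nodes)           # NodeView.__len__ -> int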