diff --git a/.github/workflows/find-all-tools.yml b/.github/workflows/find-all-tools.yml
index ee7aeb6f9..563d6e7c8 100644
--- a/.github/workflows/find-all-tools.yml
+++ b/.github/workflows/find-all-tools.yml
@@ -29,7 +29,7 @@ jobs:
           ignored_dirs="polus-python-template ftl-label .venv"

           # List of tools that are broken for known reasons
-          broken_tools="binary-operations precompute-slide microjson-to-ome region-segmentation-eval basic-flatfield-estimation ftl-label cell-border-segmentation"
+          broken_tools="binary-operations precompute-slide microjson-to-ome region-segmentation-eval basic-flatfield-estimation ftl-label cell-border-segmentation image-assembler"
           # Reasons:
           # - binary-operations: Not fully updated to new tool standards
           # - precompute-slide (Najib): Single failing test: 1023_1024_Segmentation_Zarr
@@ -38,6 +38,7 @@ jobs:
           # - basic-flatfield-estimation: Jax installation error.
           # - ftl-label: Requires Rust installation. Also, has not been updated to new tool standards.
           # - cell-border-segmentation (Hamdah): keras.models.load_model() is getting incorrect model format error.
+          # - image-assembler: Very intermittent failures. Unable to reproduce locally. Requires further investigation.

           # Initialize variables
           ignored_dirs="$ignored_dirs $broken_tools"
diff --git a/utils/rxiv-download-tool/.bumpversion.cfg b/utils/rxiv-download-tool/.bumpversion.cfg
deleted file mode 100644
index 9e6ade4d6..000000000
--- a/utils/rxiv-download-tool/.bumpversion.cfg
+++ /dev/null
@@ -1,29 +0,0 @@
-[bumpversion]
-current_version = 0.1.0
-commit = False
-tag = False
-parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-(?P<release>[a-z]+)(?P<dev>\d+))?
-serialize =
-	{major}.{minor}.{patch}-{release}{dev}
-	{major}.{minor}.{patch}
-
-[bumpversion:part:release]
-optional_value = _
-first_value = dev
-values =
-	dev
-	_
-
-[bumpversion:part:dev]
-
-[bumpversion:file:pyproject.toml]
-search = version = "{current_version}"
-replace = version = "{new_version}"
-
-[bumpversion:file:VERSION]
-
-[bumpversion:file:README.md]
-
-[bumpversion:file:plugin.json]
-
-[bumpversion:file:src/polus/images/utils/rxiv_download/__init__.py]
diff --git a/utils/rxiv-download-tool/.dockerignore b/utils/rxiv-download-tool/.dockerignore
deleted file mode 100644
index 7c603f814..000000000
--- a/utils/rxiv-download-tool/.dockerignore
+++ /dev/null
@@ -1,4 +0,0 @@
-.venv
-out
-tests
-__pycache__
diff --git a/utils/rxiv-download-tool/.gitignore b/utils/rxiv-download-tool/.gitignore
deleted file mode 100644
index c9c7ae717..000000000
--- a/utils/rxiv-download-tool/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-poetry.lock
-out
diff --git a/utils/rxiv-download-tool/Dockerfile b/utils/rxiv-download-tool/Dockerfile
deleted file mode 100644
index c9d78f6b7..000000000
--- a/utils/rxiv-download-tool/Dockerfile
+++ /dev/null
@@ -1,25 +0,0 @@
-FROM polusai/bfio:2.3.6
-
-# environment variables defined in polusai/bfio
-ENV EXEC_DIR="/opt/executables"
-ENV POLUS_IMG_EXT=".ome.tif"
-ENV POLUS_TAB_EXT=".csv"
-ENV POLUS_LOG="INFO"
-
-# Work directory defined in the base container
-WORKDIR ${EXEC_DIR}
-
-# TODO: Change the tool_dir to the tool directory
-ENV TOOL_DIR="utils/rxiv-download-tool"
-
-# Copy the repository into the container
-RUN mkdir image-tools
-COPY . ${EXEC_DIR}/image-tools
-
-# Install the tool
-RUN pip3 install "${EXEC_DIR}/image-tools/${TOOL_DIR}" --no-cache-dir
-
-# Set the entrypoint
-# TODO: Change the entrypoint to the tool entrypoint
-ENTRYPOINT ["python3", "-m", "polus.images.utils.rxiv_download"]
-CMD ["--help"]
diff --git a/utils/rxiv-download-tool/README.md b/utils/rxiv-download-tool/README.md
deleted file mode 100644
index 4a2452055..000000000
--- a/utils/rxiv-download-tool/README.md
+++ /dev/null
@@ -1,30 +0,0 @@
-# Rxiv Download (v0.1.0)
-
-This plugin allows to download data from open access archives. Currently this plugin supports downloading data from [arxiv](https://www.openarchives.org/). Later additional support for other archives will be added.
-
-## Building
-
-To build the Docker image for the download plugin, run
-`bash build-docker.sh`.
-
-## Run the Docker image
-
-To execute the built docker image for the download plugin, run
-`bash run-plugin.sh`.
-
-## Options
-
-This plugin takes 2 input arguments and
-1 output argument:
-
-| Name | Description | I/O | Type |
-| --------------- | ------------------------------------------------------------ | ------ | ----------- |
-| `--rxiv ` | Download data from open access archives | Input | String |
-| `--start ` | Start date | Input | String |
-| `--outDir` | Directory to store the downloaded data | Output | genericData |
-| `--preview` | Generate a JSON file with outputs | Output | JSON |
-
-
-
-## Sample docker command:
-```docker run -v /home/ec2-user/data/:/home/ec2-user/data/ polusai/rxiv-download-tool:0.1.0 --rxiv="arXiv" --start='2023-2-16' --outDir=/home/ec2-user/data/output```
diff --git a/utils/rxiv-download-tool/VERSION b/utils/rxiv-download-tool/VERSION
deleted file mode 100644
index 6e8bf73aa..000000000
--- a/utils/rxiv-download-tool/VERSION
+++ /dev/null
@@ -1 +0,0 @@
-0.1.0
diff --git a/utils/rxiv-download-tool/build-docker.sh b/utils/rxiv-download-tool/build-docker.sh
deleted file mode 100644
index d5e4ea257..000000000
--- a/utils/rxiv-download-tool/build-docker.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash
-
-# Change the name of the tool here
-tool_dir="utils"
-tool_name="rxiv-download-tool"
-
-# The version is read from the VERSION file
-version=$(
-
-2023-12-18T17:52:12Z
-http://export.arxiv.org/oai2
-
-
- oai:arXiv.org:1007.1025 - 2023-12-18 - cs - physics:nlin -
- - - Inflection system of a language as a complex network - Fukś, Henryk - Computer Science - Computation and Language - Nonlinear Sciences - Adaptation and Self-Organizing Systems - We investigate inflection structure of a synthetic language using Latin as an -example. We construct a bipartite graph in which one group of vertices -correspond to dictionary headwords and the other group to inflected forms -encountered in a given text. Each inflected form is connected to its -corresponding headword, which in some cases in non-unique. The resulting sparse -graph decomposes into a large number of connected components, to be called word -groups. We then show how the concept of the word group can be used to construct -coverage curves of selected Latin texts. We also investigate a version of the -inflection graph in which all theoretically possible inflected forms are -included. Distribution of sizes of connected components of this graphs -resembles cluster distribution in a lattice percolation near the critical -point. - - Comment: 6 pages, 9 figures - 2010-07-06 - text - http://arxiv.org/abs/1007.1025 - Proceedings of 2009 IEEE Toronto International Conference - - Science and Technology for Humanity, IEEE, Toronto 2009, pp. 491-496 - doi:10.1109/TIC-STH.2009.5444449 - - -
- -
- oai:arXiv.org:1007.1026 - 2023-12-18 - physics:nlin -
- - - On the calibration of neural networks for histological slide-level - classification - Kurz, Alexander - Mehrtens, Hendrik A. - Bucher, Tabea-Clara - Brinker, Titus J. - Electrical Engineering and Systems Science - Image and Video Processing - Computer Science - Computer Vision and Pattern Recognition - Deep Neural Networks have shown promising classification performance when -predicting certain biomarkers from Whole Slide Images in digital pathology. -However, the calibration of the networks' output probabilities is often not -evaluated. Communicating uncertainty by providing reliable confidence scores is -of high relevance in the medical context. In this work, we compare three neural -network architectures that combine feature representations on patch-level to a -slide-level prediction with respect to their classification performance and -evaluate their calibration. As slide-level classification task, we choose the -prediction of Microsatellite Instability from Colorectal Cancer tissue -sections. We observe that Transformers lead to good results in terms of -classification performance and calibration. When evaluating the classification -performance on a separate dataset, we observe that Transformers generalize -best. The investigation of reliability diagrams provides additional insights to -the Expected Calibration Error metric and we observe that especially -Transformers push the output probabilities to extreme values, which results in -overconfident predictions. - - Comment: 7 pages, 2 figures, 2 tables - 2023-12-15 - text - http://arxiv.org/abs/2312.09719 - - -
- -
- oai:arXiv.org:2312.09720 - 2023-12-18 - eess -
- - - RIS-Enabled NLoS Near-Field Joint Position and Velocity Estimation under - User Mobility - Rahal, Moustafa - Denis, Benoit - Keskin, Musa Furkan - Uguen, Bernard - Wymeersch, Henk - Electrical Engineering and Systems Science - Signal Processing - In the context of single-base station (BS) non-line-of-sight (NLoS) -single-epoch localization with the aid of a reflective reconfigurable -intelligent surface (RIS), this paper introduces a novel three-step algorithm -that jointly estimates the position and velocity of a mobile user equipment -(UE), while compensating for the Doppler effects observed in near-field (NF) at -the RIS elements over the short transmission duration of a sequence of downlink -(DL) pilot symbols. First, a low-complexity initialization procedure is -proposed, relying in part on far-field (FF) approximation and a static user -assumption. Then, an alternating optimization procedure is designed to -iteratively refine the velocity and position estimates, as well as the channel -gain. The refinement routines leverage small angle approximations and the -linearization of the RIS response, accounting for both NF and mobility effects. -We evaluate the performance of the proposed algorithm through extensive -simulations under diverse operating conditions with regard to signal-to-noise -ratio (SNR), UE mobility, uncontrolled multipath and RIS-UE distance. Our -results reveal remarkable performance improvements over the state-of-the-art -(SoTA) mobility-agnostic benchmark algorithm, while indicating convergence of -the proposed algorithm to respective theoretical bounds on position and -velocity estimation. - - Comment: 11 pages, 9 figures, journal - 2023-12-15 - text - http://arxiv.org/abs/2312.09720 - - -
-6905935|1001 -
-
diff --git a/utils/rxiv-download-tool/ict.yaml b/utils/rxiv-download-tool/ict.yaml
deleted file mode 100644
index f6f92fde2..000000000
--- a/utils/rxiv-download-tool/ict.yaml
+++ /dev/null
@@ -1,51 +0,0 @@
-author:
-- Nick Schaub
-- Hamdah Shafqat
-contact: nick.schaub@nih.gov
-container: polusai/rxiv-download-tool:0.1.0-dev0
-description: This plugin allows to download data from Rxiv website.
-entrypoint: python3 -m polus.images.utils.rxiv_download
-inputs:
-- description: Pull records from open access archives.
-  format:
-  - string
-  name: rxiv
-  required: true
-  type: string
-- description: Start date.
-  format:
-  - string
-  name: start
-  required: false
-  type: string
-- description: Generate an output preview.
-  format:
-  - boolean
-  name: preview
-  required: false
-  type: boolean
-name: polusai/DownloadRxivtextdata
-outputs:
-- description: Output collection.
-  format:
-  - genericData
-  name: outDir
-  required: true
-  type: path
-repository: https://github.com/PolusAI/image-tools
-specVersion: 1.0.0
-title: Download Rxiv text data
-ui:
-- description: Pull records from open access archives.
-  key: inputs.rxiv
-  title: rxiv
-  type: text
-- description: Start date.
-  key: inputs.start
-  title: start
-  type: text
-- description: Generate an output preview.
-  key: inputs.preview
-  title: Preview example output of this plugin
-  type: checkbox
-version: 0.1.0-dev0
diff --git a/utils/rxiv-download-tool/plugin.json b/utils/rxiv-download-tool/plugin.json
deleted file mode 100644
index 9f7daf370..000000000
--- a/utils/rxiv-download-tool/plugin.json
+++ /dev/null
@@ -1,70 +0,0 @@
-{
-  "name": "Rxiv-Download",
-  "version": "0.1.0",
-  "title": "Download Rxiv text data",
-  "description": "This plugin allows to download data from Rxiv website.",
-  "author": "Nick Schaub (nick.schaub@nih.gov), Hamdah Shafqat Abbasi (hamdahshafqat.abbasi@nih.gov)",
-  "institution": "National Center for Advancing Translational Sciences, National Institutes of Health",
-  "repository": "https://github.com/PolusAI/image-tools",
-  "website": "https://ncats.nih.gov/preclinical/core/informatics",
-  "citation": "",
-  "containerId": "polusai/rxiv-download-tool:0.1.0",
-  "baseCommand": [
-    "python3",
-    "-m",
-    "polus.images.utils.rxiv_download"
-  ],
-  "inputs": [
-    {
-      "name": "rxiv",
-      "type": "string",
-      "title": "rxiv",
-      "description": "Pull records from open access archives.",
-      "required": "True"
-    },
-    {
-      "name": "start",
-      "type": "string",
-      "title": "start",
-      "description": "Start date.",
-      "required": "False"
-    },
-    {
-      "name": "preview",
-      "type": "boolean",
-      "title": "Preview",
-      "description": "Generate an output preview.",
-      "required": "False"
-    }
-  ],
-  "outputs": [
-    {
-      "name": "outDir",
-      "type": "genericData",
-      "description": "Output collection."
-    }
-  ],
-  "ui": [
-    {
-      "key": "inputs.rxiv",
-      "type": "string",
-      "title": "rxiv",
-      "description": "Pull records from open access archives.",
-      "required": "True"
-    },
-    {
-      "key": "inputs.start",
-      "type": "string",
-      "title": "start",
-      "description": "Start date.",
-      "required": "False"
-    },
-    {
-      "key": "inputs.preview",
-      "type": "boolean",
-      "title": "Preview example output of this plugin",
-      "description": "Generate an output preview.",
-      "required": "False"
-    }
-  ]
-}
diff --git a/utils/rxiv-download-tool/pyproject.toml b/utils/rxiv-download-tool/pyproject.toml
deleted file mode 100644
index 9941f4b7f..000000000
--- a/utils/rxiv-download-tool/pyproject.toml
+++ /dev/null
@@ -1,41 +0,0 @@
-[tool.poetry]
-name = "polus-images-utils-rxiv-download"
-version = "0.1.0"
-description = "Fetch text data from rxiv"
-authors = [
-    "Nick Schaub <nick.schaub@nih.gov>",
-    "Hamdah Shafqat abbasi <hamdahshafqat.abbasi@nih.gov>"
-  ]
-readme = "README.md"
-packages = [{include = "polus", from = "src"}]
-
-
-[tool.poetry.dependencies]
-python = ">=3.9,<3.12"
-typer = "^0.7.0"
-requests = "^2.31.0"
-rxiv-types = "^0.1.0"
-tqdm = "^4.66.1"
-xmltodict = "^0.13.0"
-pydantic = "1.10.4"
-
-
-[[tool.poetry.source]]
-name = "test"
-url = "https://test.pypi.org/simple/"
-default = false
-secondary = true
-
-[tool.poetry.group.dev.dependencies]
-bump2version = "^1.0.1"
-flake8 = "^6.0.0"
-pre-commit = "^3.2.1"
-flake8-docstrings = "^1.7.0"
-black = "^23.3.0"
-mypy = "^1.1.1"
-pytest = "^7.2.2"
-ruff = "^0.0.270"
-
-[build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
diff --git a/utils/rxiv-download-tool/run-plugin.sh b/utils/rxiv-download-tool/run-plugin.sh
deleted file mode 100644
index 48c596a7b..000000000
--- a/utils/rxiv-download-tool/run-plugin.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-
-version=$( None:
-    """Scaled Nyxus plugin allows to extract features from labelled images."""
-    logger.info(f"--rxiv = {rxiv}")
-    logger.info(f"--start = {start}")
-    logger.info(f"--outDir = {out_dir}")
-
-    if start is not None:
-        start_date = datetime.strptime(start, "%Y-%m-%d").date()
-
-    out_dir = out_dir.resolve()
-
-    if not out_dir.exists():
-        out_dir.mkdir(exist_ok=True)
-
-    assert out_dir.exists(), f"{out_dir} does not exist!! 
Please check input path again" - - model = ArxivDownload(path=out_dir, rxiv=rxiv, start=start_date) - model.fetch_and_save_records() - - if preview: - generate_preview(out_dir) - logger.info(f"generating preview data in {out_dir}") - - -if __name__ == "__main__": - app() diff --git a/utils/rxiv-download-tool/src/polus/images/utils/rxiv_download/fetch.py b/utils/rxiv-download-tool/src/polus/images/utils/rxiv_download/fetch.py deleted file mode 100644 index b198627fd..000000000 --- a/utils/rxiv-download-tool/src/polus/images/utils/rxiv_download/fetch.py +++ /dev/null @@ -1,217 +0,0 @@ -"""Rxiv Download Plugin.""" -import json -import logging -import os -import shutil -from concurrent.futures import ProcessPoolExecutor -from datetime import datetime -from io import BytesIO -from pathlib import Path -from typing import Optional - -import requests -import xmltodict -from rxiv_types import arxiv_records -from rxiv_types.models.oai_pmh.org.openarchives.oai.pkg_2.resumption_token_type import ( - ResumptionTokenType, -) -from tqdm import tqdm -from xsdata.models.datatype import XmlDate - -logger = logging.getLogger(__name__) -logger.setLevel(os.environ.get("POLUS_LOG", logging.INFO)) - -POLUS_EXT = os.environ.get("POLUS_EXT", ".xml") - -RXIVS = { - "arXiv": {"url": "https://export.arxiv.org/oai2", "stride": 1000}, -} - - -def generate_preview( - path: Path, -) -> None: - """Generate preview of the plugin outputs.""" - prev_file = list( - Path().cwd().parents[4].joinpath("examples").rglob(f"*{POLUS_EXT}"), - )[0] - - shutil.copy(prev_file, path) - - -class ArxivDownload: - """Fetch OAI records from an API. - - Args: - rxiv: The rxiv to pull from. Must be one of ["arXiv"].str - token: A resumption token. Defaults to None. - start: Start date. Only used if `token=None`. - - Returns: - Raw XML bytes. - """ - - def __init__( - self, - path: Path, - rxiv: str, - start: Optional[datetime] = None, - ) -> None: - """Create a ArxivDownload.""" - self.path = path - self.rxiv = rxiv - self.start = start - - if self.rxiv not in RXIVS: - msg = f"{self.rxiv} is an invalid rxiv value. 
Must be one of {list(RXIVS)}" - raise ValueError( - msg, - ) - - if self.start is None and len(list(self.path.rglob(f"*{POLUS_EXT}"))) == 0: - self.start = datetime(1900, 1, 1) - - elif self.start is None and len(list(self.path.rglob(f"*{POLUS_EXT}"))) != 0: - self.start = self._resume_from() - - self.start = self.start - - self.params = {"verb": "ListRecords"} - - @staticmethod - def path_from_token( - path: Path, - rxiv: str, - start: Optional[datetime] = None, - token: Optional[ResumptionTokenType] = None, - ) -> Path: - """Creating output directory for saving records.""" - if start and token is not None: - file_path = path.joinpath( - f"{rxiv}_" - + f"{start.year}{str(start.month).zfill(2)}{str(start.day).zfill(0)}_" - + f"{int(token.cursor)}{POLUS_EXT}", - ) - - file_path.parent.mkdir(exist_ok=True, parents=True) - - return file_path - - def fetch_records(self) -> bytes: - """Fetch OAI records from an API.""" - # Configure parameters - if self.start is not None: - self.params.update( - { - "from": f"{self.start.year}-" - + f"{str(self.start.month).zfill(2)}-" - + f"{str(self.start.day).zfill(2)}", - "metadataPrefix": "oai_dc", - }, - ) - response = requests.get( - RXIVS["arXiv"]["url"], # type: ignore - params=self.params, - timeout=20, - ) - if response.ok: - logger.info( - f"Successfully hit url: {response.url}", - ) - else: - logger.info( - f"Error pulling data: {response.url} status {response.status_code}", - ) - - return response.content - - @staticmethod - def _get_latest(file: Path) -> datetime: - """Find the latest date to resume download files.""" - fixed_date = datetime(1900, 1, 1) - records = arxiv_records(str(file.absolute())) - if records.list_records is None: - msg = "Record list is empty!! Please download it again" - raise ValueError(msg) - for record in records.list_records.record: - if record.header is None: - msg = "Record header is empty!! Please download it again" - raise ValueError(msg) - if not isinstance(record.header.datestamp, XmlDate): - msg = "Record date is missing!!" 
- raise ValueError(msg) - record_date = record.header.datestamp.to_datetime() - if record_date > fixed_date: - last = record_date - return last - - def _resume_from(self) -> datetime: - """Find the previous cursor and create a resume token.""" - if not self.path.exists(): - return datetime(1900, 1, 1) - files = [ - f - for f in self.path.iterdir() - if f.is_file() and f.name.startswith(self.rxiv) - ] - - with ProcessPoolExecutor() as executor: - dates = list(executor.map(self._get_latest, files)) - return max(dates) - - @staticmethod - def save_records(path: Path, record: bytes) -> None: - """Writing response content either in XML or JSON format.""" - if POLUS_EXT == ".xml": - with Path.open(path, "wb") as fw: - fw.write(record) - fw.close() - elif POLUS_EXT == ".json": - parsed_data = xmltodict.parse(record, attr_prefix="") - json_data = json.dumps(parsed_data, indent=2) - with Path.open(path, "w") as fw: - fw.write(json_data) - fw.close() - - def fetch_and_save_records(self) -> None: - """Fetch and save response contents.""" - response = self.fetch_records() - - records = arxiv_records(BytesIO(response)) - - if records.list_records is None: - msg = "Unable to download a record" - raise ValueError(msg) - - for record in records.list_records.record: - if record.header is not None and not isinstance( - record.header.datestamp, - XmlDate, - ): - msg = "Error with downloading a XML record" - raise ValueError(msg) - - logger.info("Getting token...") - token = records.list_records.resumption_token - key, _ = token.value.split("|") - index = token.cursor - - if token.complete_list_size is None: - msg = "Error with downloading a XML record" - raise ValueError(msg) - - logger.info(f"Resuming from date: {self.start}") - - for i in tqdm( - range(int(index), token.complete_list_size, 1000), - total=((token.complete_list_size - int(index)) // 1000 + 1), - ): - thread_token = ResumptionTokenType(value="|".join([key, str(i)]), cursor=i) - - file_path = self.path_from_token( - path=self.path, - rxiv=self.rxiv, - start=self.start, - token=thread_token, - ) - self.save_records(path=file_path, record=response) diff --git a/utils/rxiv-download-tool/tests/__init__.py b/utils/rxiv-download-tool/tests/__init__.py deleted file mode 100644 index 17974cdc7..000000000 --- a/utils/rxiv-download-tool/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Rxiv Download Plugin.""" diff --git a/utils/rxiv-download-tool/tests/conftest.py b/utils/rxiv-download-tool/tests/conftest.py deleted file mode 100644 index b1448d501..000000000 --- a/utils/rxiv-download-tool/tests/conftest.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Test fixtures. - -Set up all data used in tests. 
-""" - -import shutil -import tempfile -from pathlib import Path -from typing import Union - -import pytest - - -def pytest_addoption(parser: pytest.Parser) -> None: - """Add options to pytest.""" - parser.addoption( - "--slow", - action="store_true", - dest="slow", - default=False, - help="run slow tests", - ) - - -def clean_directories() -> None: - """Remove all temporary directories.""" - for d in Path(".").cwd().iterdir(): - if d.is_dir() and d.name.startswith("tmp"): - shutil.rmtree(d) - - -@pytest.fixture() -def output_directory() -> Union[str, Path]: - """Create output directory.""" - return Path(tempfile.mkdtemp(dir=Path.cwd())) - - -@pytest.fixture(params=["2023-12-16", "2023-12-17"]) -def get_params(request: pytest.FixtureRequest) -> pytest.FixtureRequest: - """To get the parameter of the fixture.""" - return request.param diff --git a/utils/rxiv-download-tool/tests/test_cli.py b/utils/rxiv-download-tool/tests/test_cli.py deleted file mode 100644 index f96790981..000000000 --- a/utils/rxiv-download-tool/tests/test_cli.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Test Command line Tool.""" - -from typer.testing import CliRunner -from pathlib import Path -import pytest -from polus.images.utils.rxiv_download.__main__ import app -from .conftest import clean_directories -import time - - -def test_cli(output_directory: Path, get_params: pytest.FixtureRequest) -> None: - """Test the command line.""" - runner = CliRunner() - start = get_params - result = runner.invoke( - app, - [ - "--rxiv", - "arXiv", - "--start", - start, - "--outDir", - output_directory, - ], - ) - - assert result.exit_code == 0 - time.sleep(5) - clean_directories() - - -@pytest.mark.skipif("not config.getoption('slow')") -def test_short_cli(output_directory: Path, get_params: pytest.FixtureRequest) -> None: - """Test short cli command line.""" - runner = CliRunner() - start = get_params - result = runner.invoke( - app, - [ - "-r", - "arXiv", - "-s", - start, - "-o", - output_directory, - ], - ) - - assert result.exit_code == 0 - time.sleep(5) - clean_directories() diff --git a/utils/rxiv-download-tool/tests/test_fetch.py b/utils/rxiv-download-tool/tests/test_fetch.py deleted file mode 100644 index d2130cac9..000000000 --- a/utils/rxiv-download-tool/tests/test_fetch.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Test Command line Tool.""" - -from pathlib import Path -import polus.images.utils.rxiv_download.fetch as ft -from .conftest import clean_directories -import time -import pytest -from datetime import datetime - - -def test_fetch_and_save_records( - output_directory: Path, get_params: pytest.FixtureRequest -) -> None: - """Test record fetching and saving.""" - - start = datetime.strptime(get_params, "%Y-%m-%d").date() - - model = ft.ArxivDownload(path=output_directory, rxiv="arXiv", start=start) - model.fetch_and_save_records() - - out_ext = all([Path(f.name).suffix for f in output_directory.iterdir()]) - - assert out_ext == True - - out_date = [Path(f.name).stem.split("_")[1] for f in output_directory.iterdir()][0] - assert out_date == "".join(get_params.split("-")) - clean_directories() - time.sleep(5) - - -def test_fetch_records( - output_directory: Path, get_params: pytest.FixtureRequest -) -> None: - """Test fetch records.""" - - start = datetime.strptime(get_params, "%Y-%m-%d").date() - - model = ft.ArxivDownload(path=output_directory, rxiv="arXiv", start=start) - response = model.fetch_records() - - assert response != 0 - clean_directories() - time.sleep(5)