This repository was archived by the owner on Feb 21, 2025. It is now read-only.

Commit 917b49c: Merge branch 'dev'
2 parents e673fc2 + dee1de2

File tree: 4 files changed, 23 additions, 15 deletions

.github/workflows/test_package.yaml
Lines changed: 1 addition & 1 deletion

@@ -7,7 +7,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.9", "3.10", "3.11"]

     steps:
       - uses: actions/checkout@v4
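
The matrix change drops Python 3.8, so the package is now only tested on 3.9 through 3.11. A caller who wants to fail fast on older interpreters could add a guard like the following (a hypothetical sketch; no such check exists in the repository):

import sys

# Mirror the CI matrix above: the client is only tested on Python 3.9+.
if sys.version_info < (3, 9):
    raise RuntimeError(
        "runregistry_api_client is only tested on Python 3.9+; "
        "found {}.{}".format(*sys.version_info[:2])
    )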

.vscode/settings.json
Lines changed: 4 additions & 4 deletions

@@ -1,5 +1,5 @@
 {
-    "python.pythonPath": "/Users/fabioespinosa/Desktop/runregistry/runregistry_api_client/venv/bin/python3.6",
-    "python.linting.enabled": false,
-    "python.formatting.provider": "yapf"
-}
+    "python.linting.enabled": true,
+    "python.formatting.provider": "ruff",
+    "python.analysis.typeCheckingMode": "basic"
+}

runregistry/runregistry.py
Lines changed: 13 additions & 5 deletions

@@ -52,7 +52,7 @@ def setup(target):
         target_application = "cmsrunregistry-sso-proxy"


-def _get_headers(token: str = None):
+def _get_headers(token: str = ""):
     headers = {"Content-type": "application/json"}
     if not use_cookies:
         headers["email"] = email
@@ -199,7 +199,7 @@ def get_dataset(run_number, dataset_name="online", **kwargs):
     return dataset[0]


-def get_datasets(limit=40000, compress_attributes=True, **kwargs):
+def get_datasets(limit=40000, compress_attributes=True, **kwargs) -> list:
     """
     Gets all datasets that match the filter given
     :param compress_attributes: Gets the attributes inside rr_attributes:* and the ones in the DatasetTripletCache (The lumisections insdie the run/dataset) and spreads them over the run object
@@ -226,7 +226,7 @@ def get_datasets(limit=40000, compress_attributes=True, **kwargs):
         print(
             "ERROR: For queries that retrieve more than 20,000 datasets, you must pass a filter into get_datasets, an empty filter get_datasets(filter={}) works"
         )
-        return None
+        return []
     for page_number in range(1, page_count):
         additional_datasets = _get_page(
             page=page_number, url=url, data_type="datasets", **kwargs
@@ -251,6 +251,14 @@ def get_datasets(limit=40000, compress_attributes=True, **kwargs):
     return datasets


+def get_cycles():
+    url = "{}/cycles/global".format(api_url)
+    headers = _get_headers(token=_get_token())
+    if os.getenv("ENVIRONMENT") == "development":
+        print(url)
+    return requests.get(url, headers=headers).json()
+
+
 def _get_lumisection_helper(url, run_number, dataset_name="online", **kwargs):
     """
     Puts the headers for all other lumisection methods
@@ -307,7 +315,7 @@ def generate_json(json_logic, **kwargs):
     DO NOT USE, USE THE ONE BELOW (create_json)...
     It receives a json logic configuration and returns a json with lumisections which pass the filter
     """
-    if isinstance(json_logic, str) == False:
+    if not isinstance(json_logic, str):
         json_logic = json.dumps(json_logic)
     url = "{}/json_creation/generate".format(api_url)
     headers = _get_headers(token=_get_token())
@@ -321,7 +329,7 @@ def create_json(json_logic, dataset_name_filter, **kwargs):
     """
     It adds a json to the queue and polls until json is either finished or an error occured
     """
-    if isinstance(json_logic, str) == False:
+    if not isinstance(json_logic, str):
         json_logic = json.dumps(json_logic)
     url = "{}/json_portal/generate".format(api_url)

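Two of the changes above are easiest to see from the caller's side: get_datasets now returns an empty list instead of None when the 20,000-dataset guard trips, and get_cycles is a new helper that queries the /cycles/global endpoint. A minimal usage sketch, assuming the package exposes these functions at the top level and that setup/authentication has already been configured:

import runregistry

# Passing an explicit (even empty) filter is what the guard message asks for;
# the result is now always a list, so iteration and len() never hit None.
datasets = runregistry.get_datasets(filter={})
print(len(datasets), "datasets")

# New in this commit: fetch the global cycles via GET /cycles/global.
cycles = runregistry.get_cycles()
print(len(cycles), "cycles")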

runregistry/utils.py
Lines changed: 5 additions & 5 deletions

@@ -20,7 +20,7 @@ def __parse_runs_arg(runs):
         try:
             runs = int(runs)
             return [runs]
-        except:
+        except Exception:
             return []
     elif isinstance(runs, list):
         return runs
@@ -32,7 +32,7 @@ def transform_to_rr_run_filter(run_filter):
     :param run_filter: a filter that the user inputs into the api client
     :return: returns a filter that runregistry back end understands.
     """
-    if run_filter == None:
+    if not run_filter:
         return {}
     transformed_filter = {}
     for key, value in run_filter.items():
@@ -50,7 +50,7 @@ def transform_to_rr_run_filter(run_filter):
         elif key in run_rr_attributes:
             transformed_filter["rr_attributes." + key] = value
         elif key in run_triplet_attributes:
-            if "=" in value and type(value["="]) == str:
+            if "=" in value and isinstance(value["="], str):
                 # if it is a string, we know for sure its either GOOD, BAD, STANDBY, ETC...
                 value = value["="].upper()
                 if value not in (
@@ -91,7 +91,7 @@ def transform_to_rr_dataset_filter(dataset_filter):
     :param dataset_filter: a filter that the user inputs into the api client
     :return: returns a filter that runregistry back end understands.
     """
-    if dataset_filter == None:
+    if not dataset_filter:
         return {}
     transformed_filter = {}
     for key, value in dataset_filter.items():
@@ -112,7 +112,7 @@ def transform_to_rr_dataset_filter(dataset_filter):
         elif key in dataset_attributes:
             transformed_filter["dataset_attributes." + key] = value
         elif key in dataset_triplet_attributes:
-            if type(value["="]) == str:
+            if isinstance(value["="], str):
                 # if it is a string, we know for sure its either GOOD, BAD, STANDBY, ETC...
                 value = value["="].upper()
                 if value not in ("GOOD", "BAD", "STANDBY", "EXCLUDED", "NOTSET", "EMPTY"):
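
The filter helpers now short-circuit on any falsy filter and use isinstance checks instead of type comparisons. A small sketch of the resulting behaviour (assuming the helpers are importable from runregistry.utils, as the file path suggests):

from runregistry.utils import (
    transform_to_rr_dataset_filter,
    transform_to_rr_run_filter,
)

# None and an empty dict are both caught by the new truthiness check up front
# and translate to an empty Run Registry filter.
assert transform_to_rr_run_filter(None) == {}
assert transform_to_rr_run_filter({}) == {}
assert transform_to_rr_dataset_filter(None) == {}
assert transform_to_rr_dataset_filter({}) == {}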
