93 changes: 56 additions & 37 deletions bin/package_build.py
@@ -1,4 +1,6 @@
 #!/usr/bin/python3
+from urllib.parse import unquote
+from bs4 import BeautifulSoup
 import requests
 import re
 import sys
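A note on the two new imports: unquote decodes percent-encoded characters in the hrefs scraped from the package index pages before the package regex runs, and BeautifulSoup is used below to strip markup from the mirrorlist response before filtering for URLs. A minimal sketch of both steps, using made-up file and mirror names:

from urllib.parse import unquote
from bs4 import BeautifulSoup

# Percent-encoded href as it might appear in a directory index (made-up name).
print(unquote('libsigc%2B%2B20-2.10.8-3.fc38.s390x.rpm'))
# -> libsigc++20-2.10.8-3.fc38.s390x.rpm

# Mirrorlist-style response wrapped in markup (made-up hosts).
html = '<pre>https://mirror.example/fedora/\nftp://other.example/fedora/</pre>'
urls = [line.strip() for line in BeautifulSoup(html, 'html.parser').text.splitlines()
        if line.startswith(('http', 'ftp'))]
print(urls)  # ['https://mirror.example/fedora/', 'ftp://other.example/fedora/']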
@@ -112,46 +114,63 @@ def clefos():
         print(f"Saved!\nfilename: {q}")
 
 def fedora():
-    global DATA,DATA_FILE_LOCATION
+    global DATA, DATA_FILE_LOCATION
     sources = [38, 39, 40]
-    pkg_reg = r'<a href="(.*)\.rpm"'
+    pkg_reg = r'<a href="(.*?)\.rpm"'
     dirs = '0123456789abcdefghijklmnopqrstuvwxyz'
-    for i in range(len(sources)):
-        results = []
-        q = f'Fedora_{sources[i]}_List.json'
-        file_name = f'{DATA_FILE_LOCATION}/{q}'
-        current_link = f'https://dl.fedoraproject.org/pub/fedora-secondary/releases/{sources[i]}/Everything/s390x/os/Packages/'
-        archived_link = f'https://archives.fedoraproject.org/pub/archive/fedora-secondary/releases/{sources[i]}/Everything/s390x/os/Packages/'
-        try:
-            req = requests.get(current_link)
-            if req.status_code == 404:
-                current_link = archived_link
-                req = requests.get(current_link)
-                if req.status_code == 404:
-                    raise Exception(f"For Fedora {sources[i]}: Current link and Archive link both are down")
-                else:
-                    print(f'Fedora {sources[i]} has been moved to archive')
-        except Exception as e:
-            print("Couldn't pull. Error: ",str(e))
-        for each in range(len(dirs)):
-            link = f"{current_link}{dirs[each]}/"
-            req = requests.get(link)
-            data = req.text
-            if req.status_code == 404:
-                print(f"404 Directory {dirs[each]} not found")
-                continue
-            ref_data = re.findall(pkg_reg, data)
-            results.extend(ref_data)
-        DATA = open(file_name, 'w')
-        DATA.write('[\n')
-        for each in results:
-            each = each.replace('.s390x', '').replace('.noarch', '')
-            each = re.sub(r'\.fc\d\d', '', each)
-            pkg = re.search(r'([\w+\-]+)-([\w\-\.]+)', each)
-            DATA.write('{"packageName": "'+pkg.group(1)+'","version": "'+pkg.group(2)+'"},\n')
-        DATA.write('{}\n]')
-        DATA.close()
+
+    for release in sources:
+        mirrors = [
+            f'https://dl.fedoraproject.org/pub/fedora-secondary/releases/{release}/Everything/s390x/os/',
+            f'https://archives.fedoraproject.org/pub/archive/fedora-secondary/releases/{release}/Everything/s390x/os/'
+        ]
+        data = requests.get(f'https://mirrors.fedoraproject.org/mirrorlist?repo=fedora-{release}&arch=s390x&country=global')
+        if data.ok:
+            soup = BeautifulSoup(data.text, 'html.parser')
+            mirrors += [line.strip() for line in soup.text.splitlines() if line.startswith(('http', 'ftp'))]
+        else:
+            print('Failed to fetch mirrorlist. Using default mirrors.')
+
+        results = []
+        q = f'Fedora_{release}_List.json'
+
+        for mirror in mirrors:
+            all_dirs_processed = False
+            index = 0
+            while index < len(dirs):
+                link = f"{mirror}Packages/{dirs[index]}/"
+                try:
+                    req = requests.get(link, timeout=10)
+                    if req.status_code == 404:
+                        print(f"404 Directory {dirs[index]} not found at {mirror}")
+                        index+=1
+                        continue
+                    req.raise_for_status()
+                    data = unquote(req.text)
+                except (requests.exceptions.RequestException, Exception) as e:
+                    print(f"Error fetching {link}: {e}")
+                    break  # Move to the next mirror
+                else:
+                    ref_data = re.findall(pkg_reg, data)
+                    results.extend(ref_data)
+                    index+=1
+            else:
+                # Completed without error, all directories processed for this mirror
+                all_dirs_processed = True
+                break  # Break the outer mirror loop
+
+        if not all_dirs_processed:
+            raise Exception(f"Failed to process all directories for Fedora {release}. All mirrors attempted.")
+
+        with open(f'{DATA_FILE_LOCATION}/{q}', 'w') as DATA:
+            DATA.write('[\n')
+            for each in results:
+                each = each.replace('.s390x', '').replace('.noarch', '')
+                each = re.sub(r'\.fc\d\d', '', each)
+                pkg = re.search(r'([\w+\-]+)-([\w\-\.]+)', each)
+                DATA.write('{"packageName": "'+pkg.group(1)+'","version": "'+pkg.group(2)+'"},\n')
+            DATA.write('{}\n]')
+            DATA.close()
         print(f"Saved!\nfilename: {q}")
 
 def almaLinux():
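The mirror failover in the reworked fedora() leans on Python's loop else clause: the else attached to the while runs only when the loop finishes without hitting break, so a fetch error abandons the current mirror, while a clean pass over every directory prefix sets all_dirs_processed and stops trying further mirrors. A minimal sketch of that control flow, with a hypothetical fetch() and made-up mirror URLs:

def fetch(url):
    # Stand-in for the real HTTP request; fails for the "bad" mirror.
    if "bad" in url:
        raise IOError("unreachable")
    return f"contents of {url}"

mirrors = ["https://bad.example/", "https://good.example/"]
prefixes = "abc"

for mirror in mirrors:
    all_done = False
    i = 0
    while i < len(prefixes):
        try:
            fetch(f"{mirror}{prefixes[i]}/")
        except IOError:
            break          # this mirror failed, move on to the next one
        else:
            i += 1         # this prefix worked, fetch the next one
    else:
        all_done = True    # while ended without break: every prefix fetched
        break              # stop trying further mirrors

print("succeeded" if all_done else "all mirrors failed")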
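On the pkg_reg change from (.*) to (.*?): the greedy form grabs everything up to the last .rpm" it can still match, which collapses multiple links into one run-on match, while the non-greedy form stops at the first. A small illustration with made-up package file names:

import re

# Made-up directory-index fragment with two links on one line.
line = '<a href="foo-1.0-1.s390x.rpm">foo</a> <a href="bar-2.3-4.s390x.rpm">bar</a>'

greedy = re.findall(r'<a href="(.*)\.rpm"', line)
lazy   = re.findall(r'<a href="(.*?)\.rpm"', line)

print(greedy)  # ['foo-1.0-1.s390x.rpm">foo</a> <a href="bar-2.3-4.s390x']  (one run-on match)
print(lazy)    # ['foo-1.0-1.s390x', 'bar-2.3-4.s390x']  (one clean name per link)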