initial commit

commit 5c98ec52cf
Author: BigfootACA
Date:   2024-09-19 22:36:34 +08:00
7 changed files with 1449 additions and 0 deletions

.gitignore (new file, 53 lines)
@@ -0,0 +1,53 @@
*.rej
*.orig
*.swp
*.save*
*.o
*.a
*.out
*.lib
*.obj
*.dll
*.so
*.exe
*.gch
*.plist
*.mo
*.gmo
*.fd
*.iso
*.img
*.img.*
*.qcow2
*.vhd
*.vdi
*.vmdk
*.cpio
*.cpio.*
*.ttf
*.ttc
*.pcf
*.pcf.*
*.efi
*.db
vgcore.*
/build*
initramfs*.*
initrd*.*
System.map*
/cmake-build-*
/.idea
/.vscode
/.cache
CMakeCache.txt
CMakeFiles
Makefile
cmake_install.cmake
node_modules
package.json
package-lock.json
fonts.scale
fonts.dir
/config.json
/*.gpg
/*.key

Dockerfile (new file, 19 lines)
@@ -0,0 +1,19 @@
FROM archlinux:latest
ARG MIRROR="https://mirrors.tuna.tsinghua.edu.cn"
RUN echo Server = $MIRROR/archlinux/\$repo/os/\$arch > /etc/pacman.d/mirrorlist
RUN pacman-key --init
RUN pacman-key --populate
RUN pacman --noconfirm -Syu \
python python-pip python-setuptools \
python-pycryptodome python-pycryptodomex \
python-requests python-idna python-urllib3 \
python-xmltodict python-crcmod python-six pyalpm \
python-charset-normalizer python-certifi python-pgpy
RUN pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple
RUN pip install --break-system-packages cos-python-sdk-v5 tencentcloud-sdk-python
RUN mkdir /workspace /updater
COPY config.json repo.py upload.html /updater/
COPY keyring.gpg signer.key /updater/
WORKDIR /updater
EXPOSE 8888/tcp
CMD ["python3", "/updater/repo.py"]
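
With config.json, keyring.gpg and signer.key present in the build context, the image can be built and started roughly as follows (the image tag and host port mapping are illustrative, not part of this commit):

# Build the updater image; the COPY steps above expect the listed files in the current directory
docker build -t repo-updater .
# Run it, publishing the HTTP API on the port declared by EXPOSE
docker run -d --name repo-updater -p 8888:8888 repo-updater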

config.json.example (new file, 33 lines)
@@ -0,0 +1,33 @@
{
"repo": "<YOUR REPO>",
"region": "<YOUR REGION>",
"bucket": "<YOUR BUCKET>",
"secret_id": "<YOUR SECRET ID>",
"secret_key": "<YOUR SECRET KEY>",
"arch": ["aarch64"],
"keyring": "keyring.gpg",
"signer": "signer.key",
"signer_passphrase": "<YOUR SIGNER KEY PASSPHRASE>",
"max_sign_file": 8192,
"max_pkg_file": 134217728,
"upload_size": 1073741824,
"upload_files": 100,
"pkg_exts": [
".pkg.tar.gz",
".pkg.tar.xz",
".pkg.tar.zst"
],
"sign_exts": [
".pkg.tar.gz.sig",
".pkg.tar.xz.sig",
".pkg.tar.zst.sig"
],
"workspace": "/workspace",
"upload": "/upload",
"update_page": "upload.html",
"bind": "0.0.0.0",
"port": 8888,
"reserved_pkg": [
"<YOUR REPO>-keyring"
]
}

repo.py (new file, 746 lines)
@@ -0,0 +1,746 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os
import sys
import json
import time
import logging
import argparse
import threading
import subprocess
from stat import S_ISREG
from pyalpm import Handle
from pgpy import PGPMessage, PGPKey
from qcloud_cos import CosS3Client, CosConfig, CosClientError, CosServiceError
from tencentcloud.common.credential import Credential
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.cdn.v20180606.cdn_client import CdnClient
from tencentcloud.cdn.v20180606.models import PurgeUrlsCacheRequest, PushUrlsCacheRequest
from http.server import ThreadingHTTPServer, SimpleHTTPRequestHandler
from http import HTTPStatus, HTTPMethod
class RepoContext:
config: dict = {}
gpg: dict[str, PGPKey] = {}
signer: PGPKey = None
cred: Credential = None
cdn: CdnClient = None
cos_client: CosS3Client = None
cos_config: CosConfig = None
pyalpm: Handle = Handle("/", "/var/lib/pacman")
lock: threading.Lock = threading.Lock()
repo_data: RepoContext = RepoContext()
class HTTPRequestHandler(SimpleHTTPRequestHandler):
def write_error(self, response, length: int, headers: dict, code: HTTPStatus = HTTPStatus.OK):
self.send_response(code)
if length > 0:
self.send_header("Content-Length", str(length))
for key, value in headers.items():
self.send_header(key, value)
self.end_headers()
if length > 0:
self.wfile.write(response)
self.wfile.flush()
def write_text(self, response: str, code: HTTPStatus = HTTPStatus.OK):
headers = {'Content-Type': "text/plain"}
self.write_error(bytes(response, "UTF-8"), len(response), headers, code)
def write_json(self, response, code: HTTPStatus = HTTPStatus.OK):
headers = {'Content-Type': "application/json"}
data = json.dumps(response)
self.write_error(bytes(data, "UTF-8"), len(data), headers, code)
def write_html(self, response: str, code: HTTPStatus = HTTPStatus.OK):
headers = {'Content-Type': "text/html"}
self.write_error(bytes(response, "UTF-8"), len(response), headers, code)
@staticmethod
def cleanup_folder() -> tuple:
global repo_data
size = 0
count = 0
try:
now = time.time()
expires = now - 600
path = repo_data.config["upload"]
with repo_data.lock:
for f in os.listdir(path):
full = os.path.join(path, f)
st = os.stat(full)
if not S_ISREG(st.st_mode):
continue
if st.st_mtime < expires:
os.remove(full)
logging.info("cleanup %s" % f)
continue
size = size + st.st_size
count = count + 1
except BaseException as e:
logging.exception(e)
return size, count
def check_folder(self) -> bool:
global repo_data
size, count = self.cleanup_folder()
if count > repo_data.config["upload_files"] or size >= repo_data.config["upload_size"]:
self.write_text("Too many files in upload\n", HTTPStatus.TOO_MANY_REQUESTS)
return False
return True
@staticmethod
def check_msg_own(message: PGPMessage) -> list[PGPKey]:
found = []
for sign in message.signatures:
logging.info("signature signer id %s" % sign.signer)
for k, v in repo_data.gpg.items():
if sign.signer in v.fingerprint.keyid:
key = v.fingerprint
for u in v.userids:
key = u.userid
logging.info("found signer %s" % key)
found.append(v)
return found
@staticmethod
def verify_package(data: bytes, message: PGPMessage, signers: list) -> bool:
success = False
for sign in message.signatures:
for signer in signers:
if signer.verify(data, sign):
logging.info("verify package signature %s successful" % sign.signer)
success = True
return success
@staticmethod
def try_download(key, dest, bucket=None):
global repo_data
success = False
if bucket is None:
bucket = repo_data.config["bucket"]
for i in range(0, 10):
try:
repo_data.cos_client.download_file(
Bucket=bucket, Key=key, DestFilePath=dest
)
success = True
break
except (CosClientError, CosServiceError) as e:
logging.exception(e)
if not success:
raise Exception("download %s failed" % key)
@staticmethod
def try_upload(key, src, bucket=None):
global repo_data
success = False
if bucket is None:
bucket = repo_data.config["bucket"]
for i in range(0, 10):
try:
repo_data.cos_client.upload_file(
Bucket=bucket, Key=key, LocalFilePath=src
)
success = True
break
except (CosClientError, CosServiceError) as e:
logging.exception(e)
if not success:
raise Exception("upload %s failed" % key)
@staticmethod
def do_copy(src, dest, bucket=None):
global repo_data
success = False
if bucket is None:
bucket = repo_data.config["bucket"]
try:
repo_data.cos_client.copy_object(
Bucket=bucket, Key=dest,
CopySource={
'Bucket': bucket, 'Key': src,
'Region': repo_data.config["region"]
}
)
success = True
except BaseException:
pass
return success
@staticmethod
def is_exists(key, bucket=None):
global repo_data
if bucket is None:
bucket = repo_data.config["bucket"]
try:
return repo_data.cos_client.object_exists(
Bucket=bucket, Key=key,
)
except BaseException:
return False
@staticmethod
def do_delete(key, bucket=None):
global repo_data
success = False
if bucket is None:
bucket = repo_data.config["bucket"]
try:
repo_data.cos_client.delete_object(Bucket=bucket, Key=key)
success = True
except BaseException:
pass
return success
def try_download_db(self, arch, file):
global repo_data
self.try_download(
"arch/%s/%s" % (arch, file),
"%s/%s" % (repo_data.config["workspace"], file)
)
def try_upload_db(self, arch, file):
global repo_data
self.try_upload(
"arch/%s/%s" % (arch, file),
"%s/%s" % (repo_data.config["workspace"], file)
)
def do_copy_db(self, arch, src, dest):
global repo_data
self.do_copy(
"arch/%s/%s" % (arch, src),
"arch/%s/%s" % (arch, dest),
)
@staticmethod
def format_size(size: int) -> str:
units = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB']
unit = units[0]
for unit in units:
if size < 1024:
break
size /= 1024
return "{:.2f} {}".format(size, unit)
def proc_update_db(self, data):
global repo_data
if not isinstance(data, dict):
self.write_text("Request not json\n", HTTPStatus.BAD_REQUEST)
return
if 'arch' not in data or data["arch"] not in repo_data.config["arch"]:
self.write_text("Bad architecture\n", HTTPStatus.BAD_REQUEST)
return
if 'target' not in data or len(data["target"]) <= 0 or '/' in data["target"] or '\\' in data["target"]:
self.write_text("Bad filename\n", HTTPStatus.BAD_REQUEST)
return
now = time.time()
repo = repo_data.config["repo"]
work = repo_data.config["workspace"] + "/"
db = "%s.db" % repo
db_sig = "%s.sig" % db
db_tar = "%s.tar.gz" % db
db_tar_sig = "%s.sig" % db_tar
files = "%s.files" % repo
files_sig = "%s.sig" % files
files_tar = "%s.tar.gz" % files
files_tar_sig = "%s.sig" % files_tar
all_files = [db, db_sig, db_tar, db_tar_sig, files, files_sig, files_tar, files_tar_sig]
path = "%s/%s" % (repo_data.config["upload"], data["target"])
sign = "%s/%s.sig" % (repo_data.config["upload"], data["target"])
try:
with repo_data.lock:
if not os.path.exists(path) or not os.path.exists(sign):
self.write_text("Target does not exist\n", HTTPStatus.GONE)
return
for file in all_files:
target = work + file
if os.path.exists(target):
os.remove(target)
if self.is_exists("arch/%s/%s" % (data["arch"], data["target"])) or \
self.is_exists("arch/%s/%s.sig" % (data["arch"], data["target"])):
self.write_text("Target already exists\n", HTTPStatus.CONFLICT)
return
logging.debug("verifying package")
try:
os.utime(sign, (now, now))
with open(sign, "rb") as f:
binary = f.read()
message = PGPMessage.from_blob(binary)
assert message
except BaseException as e:
logging.exception(e)
self.write_text("Bad signature\n", HTTPStatus.NOT_ACCEPTABLE)
return
found = self.check_msg_own(message)
if len(found) <= 0:
logging.info("package signer is not in the allow list")
self.write_text("Package signer not in allow list\n", HTTPStatus.FORBIDDEN)
return
try:
os.utime(sign, (now, now))
with open(path, "rb") as f:
binary = f.read()
if len(binary) <= 0:
raise Exception("read data mismatch")
if not self.verify_package(binary, message, found):
logging.info("verify package signature failed")
self.write_text("Bad package signature\n", HTTPStatus.FORBIDDEN)
return
except BaseException as e:
logging.exception(e)
self.write_text("Verify package failed\n", HTTPStatus.NOT_ACCEPTABLE)
return
pkg = repo_data.pyalpm.load_pkg(path)
logging.info("package name: %s" % pkg.name)
logging.info("package version: %s" % pkg.version)
logging.info("package architecture: %s" % pkg.arch)
logging.info("package packager: %s" % pkg.packager)
logging.info("package size: %s" % self.format_size(pkg.size))
logging.info("package installed size: %s" % self.format_size(pkg.isize))
logging.info("package url: %s" % pkg.url)
name = "%s-%s-%s" % (pkg.name, pkg.version, pkg.arch)
if not any(name + ext == data["target"] for ext in repo_data.config["pkg_exts"]):
self.write_text("Bad package name\n", HTTPStatus.NOT_ACCEPTABLE)
return
if not any(pkg.packager in uid.userid for signer in found for uid in signer.userids):
self.write_text("Packager mismatch with PGP userid\n", HTTPStatus.NOT_ACCEPTABLE)
return
if data["arch"] != "any" and data["arch"] not in repo_data.config["arch"]:
self.write_text("Target package architecture unsupported\n", HTTPStatus.NOT_ACCEPTABLE)
return
rst = repo_data.config.get("restrict_pkg", {})
if pkg.name in rst and not any(rst[pkg.name] == fp.fingerprint for fp in found):
self.write_text("Target package name is in restricted list\n", HTTPStatus.FORBIDDEN)
return
logging.info("verify package done")
logging.debug("downloading database")
self.try_download_db(data["arch"], db_tar)
self.try_download_db(data["arch"], files_tar)
logging.info("downloaded database")
logging.debug("updating database")
subprocess.run(
["repo-add", work + db_tar, path],
timeout=30, check=True, cwd=work
)
logging.info("update database done")
logging.debug("signing database")
with repo_data.signer.unlock(repo_data.config["signer_passphrase"]):
with open(work + db_tar, "rb") as f:
cont = f.read()
msg = repo_data.signer.sign(cont)
with open(work + db_tar_sig, "wb") as w:
w.write(bytes(msg))
with open(work + files_tar, "rb") as f:
cont = f.read()
msg = repo_data.signer.sign(cont)
with open(work + files_tar_sig, "wb") as w:
w.write(bytes(msg))
logging.info("sign database done")
logging.debug("uploading package")
self.try_upload("arch/%s/%s" % (data["arch"], data["target"]), path)
os.utime(path, (now, now))
self.try_upload("arch/%s/%s.sig" % (data["arch"], data["target"]), sign)
os.utime(sign, (now, now))
logging.info("uploaded package")
logging.debug("removing old databases")
for file in all_files:
target = "arch/%s/%s" % (data["arch"], file)
target_old = "%s.old" % target
if self.is_exists(target):
if self.is_exists(target_old):
self.do_delete(target_old)
self.do_copy(target, target_old)
self.do_delete(target)
logging.info("removed old databases")
logging.debug("uploading database")
self.try_upload_db(data["arch"], db_tar)
self.try_upload_db(data["arch"], db_tar_sig)
self.try_upload_db(data["arch"], files_tar)
self.try_upload_db(data["arch"], files_tar_sig)
self.do_copy_db(data["arch"], db_tar, db)
self.do_copy_db(data["arch"], db_tar_sig, db_sig)
self.do_copy_db(data["arch"], files_tar, files)
self.do_copy_db(data["arch"], files_tar_sig, files_sig)
logging.info("uploaded database")
if "cdn" in repo_data.config:
logging.debug("purging cdn cache")
domain = repo_data.config["cdn"]
urls = [
"https://%s/arch/%s/%s" % (domain, data["arch"], data["target"]),
"https://%s/arch/%s/%s.sig" % (domain, data["arch"], data["target"]),
]
for file in all_files:
urls.append("https://%s/arch/%s/%s" % (domain, data["arch"], file))
for url in urls:
logging.info("new url: %s" % url)
try:
req = PurgeUrlsCacheRequest()
req.Urls = urls
repo_data.cdn.PurgeUrlsCache(req)
except BaseException as e:
logging.exception(e)
try:
req = PushUrlsCacheRequest()
req.Urls = urls
repo_data.cdn.PushUrlsCache(req)
except BaseException as e:
logging.exception(e)
logging.info("purged cdn cache")
self.write_text("Database updated\n", HTTPStatus.OK)
except BaseException as e:
logging.exception(e)
self.write_text("Error while updating database\n", HTTPStatus.INTERNAL_SERVER_ERROR)
def proc_sign(self, length: int, filename: str):
global repo_data
path = "%s/%s" % (repo_data.config["upload"], filename)
m = repo_data.config["max_sign_file"]
if length > m:
self.write_text(
"Signature too large (maximum %s)\n" % self.format_size(m),
HTTPStatus.REQUEST_ENTITY_TOO_LARGE
)
return
try:
data = self.rfile.read(length)
assert data and len(data) == length
message = PGPMessage.from_blob(data)
assert message
except BaseException as e:
logging.exception(e)
self.write_text("Bad signature\n", HTTPStatus.NOT_ACCEPTABLE)
return
logging.info("process pgp %s" % filename)
if len(self.check_msg_own(message)) <= 0:
logging.info("no signer is in the allow list")
self.write_text("Target signer not in allow list\n", HTTPStatus.FORBIDDEN)
return
with repo_data.lock:
if os.path.exists(path):
os.remove(path)
with open(path, "wb") as f:
f.write(data)
logging.info("saved %s size %s" % (path, self.format_size(length)))
self.write_text("Signature saved\n", HTTPStatus.CREATED)
def proc_pkgs(self, length: int, filename: str):
global repo_data
now = time.time()
path = "%s/%s" % (repo_data.config["upload"], filename)
sign = "%s/%s.sig" % (repo_data.config["upload"], filename)
m = repo_data.config["max_pkg_file"]
if length >= m:
self.write_text(
"Package too large (maximum %s)\n" % self.format_size(m),
HTTPStatus.REQUEST_ENTITY_TOO_LARGE
)
return
with repo_data.lock:
if not os.path.exists(sign):
self.write_text("You need to upload the signature first\n", HTTPStatus.NOT_ACCEPTABLE)
return
try:
os.utime(sign, (now, now))
with open(sign, "rb") as f:
data = f.read()
message = PGPMessage.from_blob(data)
assert message
except BaseException as e:
logging.exception(e)
self.write_text("Bad signature\n", HTTPStatus.NOT_ACCEPTABLE)
return
found = self.check_msg_own(message)
if len(found) <= 0:
logging.info("package signer is not in the allow list")
self.write_text("Package signer not in allow list\n", HTTPStatus.FORBIDDEN)
return
data = self.rfile.read(length)
if len(data) != length:
raise Exception("read data mismatch")
if not self.verify_package(data, message, found):
logging.info("verify package signature failed")
self.write_text("Bad package signature\n", HTTPStatus.FORBIDDEN)
return
if os.path.exists(path):
os.remove(path)
with open(path, "wb") as f:
f.write(data)
logging.info("saved %s size %s" % (path, self.format_size(length)))
self.write_text("File saved\n", HTTPStatus.CREATED)
def proc_get_pkgs(self, filename: str):
path = "%s/%s" % (repo_data.config["upload"], filename)
try:
if not os.path.exists(path):
self.write_text("404 Not Found\n", HTTPStatus.NOT_FOUND)
return
with open(path, "rb") as f:
st = os.fstat(f.fileno())
if not S_ISREG(st.st_mode):
self.write_text("Bad file\n", HTTPStatus.FORBIDDEN)
return
self.send_response(HTTPStatus.OK)
self.send_header("Content-Length", str(st.st_size))
self.send_header("Content-Type", "application/octet-stream")
self.send_header("Content-Disposition", "attachment; filename=\"%s\"" % filename)
self.end_headers()
self.copyfile(f, self.wfile)
except BaseException as e:
logging.exception(e)
self.write_text("Error while reading package\n", HTTPStatus.INTERNAL_SERVER_ERROR)
def proc_get_page(self):
global repo_data
if not isinstance(repo_data.config["update_page"], str):
self.write_text("404 Not Found\n", HTTPStatus.NOT_FOUND)
return
if not os.path.exists(repo_data.config["update_page"]):
self.write_text("404 Not Found\n", HTTPStatus.NOT_FOUND)
return
with open(repo_data.config["update_page"], "r") as f:
self.write_html(f.read())
def proc_api(self, method):
global repo_data
data = None
if method == HTTPMethod.POST:
if 'Content-Length' not in self.headers:
self.write_text("Missing Content-Length\n", HTTPStatus.LENGTH_REQUIRED)
return
length = int(self.headers['Content-Length'])
if length >= 0x8000000:
self.write_text("File too large\n", HTTPStatus.REQUEST_ENTITY_TOO_LARGE)
return
data = self.rfile.read(length)
if data is not None and "Content-Type" in self.headers:
match self.headers['Content-Type']:
case "text/plain": data = data.decode("UTF-8")
case "application/json": data = json.loads(data.decode("UTF-8"))
if self.path == "/api/info":
self.write_json({
"repo": repo_data.config["repo"],
"arch": repo_data.config["arch"],
"sign_exts": repo_data.config["sign_exts"],
"pkg_exts": repo_data.config["pkg_exts"],
"max_sign_file": repo_data.config["max_sign_file"],
"max_pkg_file": repo_data.config["max_pkg_file"],
"upload_size": repo_data.config["upload_size"],
"upload_files": repo_data.config["upload_files"],
})
return
if self.path == "/api/update":
if method != HTTPMethod.POST:
self.write_text("JSON POST request required\n", HTTPStatus.BAD_REQUEST)
return
self.proc_update_db(data)
return
self.write_text("404 Not Found\n", HTTPStatus.NOT_FOUND)
def do_DELETE(self):
if not self.path.startswith("/"):
self.write_text("404 Not Found\n", HTTPStatus.NOT_FOUND)
return
filename = self.path[1:]
if len(filename) <= 0 or '/' in filename or '\\' in filename:
self.write_text("Invalid filename\n", HTTPStatus.FORBIDDEN)
return
if not self.check_folder():
return
if not filename.endswith(tuple(repo_data.config["sign_exts"])) and \
not filename.endswith(tuple(repo_data.config["pkg_exts"])):
self.write_text(
"Only %s, %s are accepted\n" % (
repo_data.config["sign_exts"],
repo_data.config["pkg_exts"],
), HTTPStatus.FORBIDDEN
)
return
try:
path = "%s/%s" % (repo_data.config["upload"], filename)
with repo_data.lock:
if os.path.exists(path):
os.remove(path)
logging.info("removed %s" % path)
self.write_text("Target deleted\n", HTTPStatus.OK)
else:
self.write_text("Target not found\n", HTTPStatus.OK)
except BaseException as e:
logging.exception(e)
self.write_text("Error while deleting package\n", HTTPStatus.INTERNAL_SERVER_ERROR)
def do_POST(self):
if self.path.startswith("/api/"):
self.proc_api(HTTPMethod.POST)
return
self.write_text("404 Not Found\n", HTTPStatus.NOT_FOUND)
def do_GET(self):
if self.path == "/" or self.path == "/index.html":
self.proc_get_page()
return
if self.path.startswith("/api/"):
self.proc_api(HTTPMethod.GET)
return
filename = self.path[1:]
if filename == "index.html":
self.proc_get_page()
return
if not self.check_folder():
return
if filename.endswith(tuple(repo_data.config["pkg_exts"])) or \
filename.endswith(tuple(repo_data.config["sign_exts"])):
if len(filename) <= 0 or '/' in filename or '\\' in filename:
self.write_text("Invalid filename\n", HTTPStatus.FORBIDDEN)
return
self.proc_get_pkgs(filename)
return
self.write_text("404 Not Found\n", HTTPStatus.NOT_FOUND)
def do_PUT(self):
if not self.path.startswith("/"):
self.write_text("404 Not Found\n", HTTPStatus.NOT_FOUND)
return
if 'Content-Length' not in self.headers:
self.write_text("Missing Content-Length\n", HTTPStatus.LENGTH_REQUIRED)
return
length = int(self.headers['Content-Length'])
filename = self.path[1:]
if len(filename) <= 0 or '/' in filename or '\\' in filename:
self.write_text("Invalid filename\n", HTTPStatus.FORBIDDEN)
return
logging.info("target file %s" % filename)
if not self.check_folder():
return
try:
if filename.endswith(tuple(repo_data.config["sign_exts"])):
self.proc_sign(length, filename)
elif filename.endswith(tuple(repo_data.config["pkg_exts"])):
self.proc_pkgs(length, filename)
else:
self.write_text(
"Only %s, %s are accepted\n" % (
repo_data.config["sign_exts"],
repo_data.config["pkg_exts"],
), HTTPStatus.FORBIDDEN
)
except BaseException as e:
logging.exception(e)
self.write_text("Error while processing content\n", HTTPStatus.INTERNAL_SERVER_ERROR)
def load_key_file(filename):
in_key = False
key_text = ""
with open(filename, "rb") as f:
while True:
line = f.readline()
if not line:
break
line = line.decode("UTF-8")
if line == "-----BEGIN PGP PUBLIC KEY BLOCK-----\n":
if in_key:
logging.warning("unexpected key begin")
in_key = True
key_text = str(line)
elif line == "-----END PGP PUBLIC KEY BLOCK-----\n":
if not in_key:
logging.warning("unexpected key end")
key_text = key_text + str(line)
keys = PGPKey.from_blob(key_text)
load_key(keys[0])
in_key = False
key_text = ""
elif line == "\n":
continue
elif len(line) > 0 and in_key:
key_text = key_text + str(line)
if in_key:
logging.warning("unexpected keys eof")
def load_key(key: PGPKey):
if not key.is_public:
raise Exception("try to load private key")
if key.is_expired:
logging.warning("key %s is expired" % key.fingerprint)
logging.info("found %s" % key.fingerprint)
for uid in key.userids:
logging.info("uid %s" % uid.userid)
repo_data.gpg[key.fingerprint] = key
def main(argv: list) -> int:
global repo_data
prs = argparse.ArgumentParser("Arch Linux Repo Over COS Updater API")
prs.add_argument("-f", "--config-file", help="Set config file", default="config.json")
args = prs.parse_args(argv[1:])
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
if args.config_file:
with open(args.config_file, "r") as f:
repo_data.config = json.load(f)
if isinstance(repo_data.config["keyring"], str):
load_key_file(repo_data.config["keyring"])
elif isinstance(repo_data.config["keyring"], list):
for file in repo_data.config["keyring"]:
load_key_file(file)
else:
raise Exception("no keyring file found")
if 'signer' not in repo_data.config:
raise Exception("no signer key file found")
repo_data.signer = PGPKey.from_file(repo_data.config["signer"])[0]
if repo_data.signer.is_public:
raise Exception("signer is not a private key")
if not repo_data.signer.is_protected:
raise Exception("private key unprotected")
logging.info("loaded %d keys" % len(repo_data.gpg))
repo_data.cred = Credential(
secret_id=repo_data.config["secret_id"],
secret_key=repo_data.config["secret_key"],
)
repo_data.cdn = CdnClient(
repo_data.cred,
repo_data.config["region"]
)
repo_data.cos_config = CosConfig(
Region=repo_data.config["region"],
SecretId=repo_data.config["secret_id"],
SecretKey=repo_data.config["secret_key"],
Scheme="https",
)
repo_data.cos_client = CosS3Client(repo_data.cos_config)
if not os.path.exists(repo_data.config["workspace"]):
os.mkdir(repo_data.config["workspace"])
if not os.path.exists(repo_data.config["upload"]):
os.mkdir(repo_data.config["upload"])
listen = (repo_data.config["bind"], repo_data.config["port"])
with ThreadingHTTPServer(listen, HTTPRequestHandler) as httpd:
print(
f"Serving HTTP on {listen[0]} port {listen[1]} "
f"(http://{listen[0]}:{listen[1]}/) ..."
)
try:
httpd.serve_forever()
except KeyboardInterrupt:
print("\nKeyboard interrupt received, exiting.")
httpd.shutdown()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
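
A minimal sketch of running the updater outside the container, assuming the Python dependencies installed in the Dockerfile are available and a filled-in config.json sits next to the script:

# Start the HTTP API with an explicit configuration file (defaults to config.json)
python3 repo.py -f config.json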

sign-pkg.py (new file, 63 lines)
@@ -0,0 +1,63 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from os import listdir, path
from sys import argv, exit, stdout
from pgpy import PGPKey
from logging import info, basicConfig, INFO
from argparse import ArgumentParser
def LoadKey(key: str) -> PGPKey:
signer = PGPKey.from_file(key)[0]
if signer.is_public:
raise Exception("signer is not a private key")
if not signer.is_protected:
raise Exception("private key unprotected")
info("loaded key %s" % key)
return signer
def SignPackage(signer: PGPKey, pwd: str, file: str, sign: str = None):
if sign is None:
sign = file + ".sig"
info("signing %s" % file)
with signer.unlock(pwd):
with open(file, "rb") as r:
msg = signer.sign(r.read())
with open(sign, "wb") as w:
w.write(bytes(msg))
info("wrote signature %s" % sign)
def SignOnePackage(key: str, pwd: str, file: str, sign: str = None):
SignPackage(LoadKey(key), pwd, file, sign)
def main(args: list) -> int:
prs = ArgumentParser("Renegade Project Arch Linux Repo Uploader")
prs.add_argument("-d", "--dir", help="Package folder", required=False)
prs.add_argument("-f", "--file", help="Package file", required=False)
prs.add_argument("-s", "--sign", help="Signature file", required=False)
prs.add_argument("-k", "--key", help="Private key", required=True)
prs.add_argument("-p", "--pwd", help="Key passphrase", required=True)
ps = prs.parse_args(args[1:])
basicConfig(level=INFO, stream=stdout)
key = LoadKey(ps.key)
cnt = 0
if ps.file:
SignPackage(key, ps.pwd, ps.file, ps.sign)
cnt += 1
elif ps.dir:
exts = [".pkg.tar.gz", ".pkg.tar.xz", ".pkg.tar.zst"]
for f in listdir(ps.dir):
full = path.join(ps.dir, f)
if any(f.endswith(ext) for ext in exts):
SignPackage(key, ps.pwd, full)
cnt += 1
if cnt <= 0:
raise Exception("no packages found")
return 0
if __name__ == '__main__':
exit(main(argv))
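
Example invocations (key path, passphrase and package names are placeholders):

# Sign a single package, writing pkg-0.1-1-any.pkg.tar.xz.sig next to it
python3 sign-pkg.py -k signer.key -p '<passphrase>' -f pkg-0.1-1-any.pkg.tar.xz
# Sign every *.pkg.tar.gz/.xz/.zst found in ./out
python3 sign-pkg.py -k signer.key -p '<passphrase>' -d ./out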

upload-pkg.py (new file, 139 lines)
@@ -0,0 +1,139 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from pyalpm import Handle
from sys import argv, exit, stdout
from os import listdir
from os.path import exists, basename, join
from logging import warning, info, basicConfig, INFO
from requests import get, post, put, delete, RequestException, Response
from argparse import ArgumentParser
default_server = "https://repo-updater.classfun.cn"
def FormatSize(size: int) -> str:
units = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB']
unit = units[0]
for unit in units:
if size < 1024:
break
size /= 1024
return "{:.2f} {}".format(size, unit)
def GetReasonText(res: Response, default: str = "Unknown") -> str:
if "text/plain" not in res.headers.get("Content-Type", ""):
return default
length = res.headers.get("Content-Length")
if length is None or int(length) >= 128:
return default
return res.text.strip()
def UploadFile(url: str, file: str, code: int = 201):
with open(file, "rb") as f:
res = put(url, f.read())
text = GetReasonText(res)
if res.status_code != code:
raise RequestException(
"upload %s status not %d: %d (%s)" %
(file, code, res.status_code, text)
)
def UploadPackage(
pkg: str,
sign: str = None,
arch: str = None,
server: str = default_server,
handle: Handle = None
):
if sign is None:
sign = pkg + ".sig"
if handle is None:
handle = Handle("/", "/var/lib/pacman")
if not exists(pkg):
raise FileNotFoundError("Target package file %s not found" % pkg)
if not exists(sign):
raise FileNotFoundError("Target signature file %s not found" % sign)
if pkg + ".sig" != sign:
warning("signature filename mismatch with package file name")
alpm = handle.load_pkg(pkg)
if not alpm:
raise IOError("Open package %s failed" % pkg)
info("package name: %s" % alpm.name)
info("package version: %s" % alpm.version)
info("package architecture: %s" % alpm.arch)
info("package packager: %s" % alpm.packager)
info("package size: %s" % FormatSize(alpm.size))
info("package installed size: %s" % FormatSize(alpm.isize))
info("package url: %s" % alpm.url)
name = "%s-%s-%s" % (alpm.name, alpm.version, alpm.arch)
res = get("%s/api/info" % server)
if arch is None:
arch = alpm.arch
if arch == "any":
raise ValueError("Unable to detect target architecture")
if res.status_code != 200:
raise RequestException("status not 200: %d" % res.status_code)
base = res.json()
pkg_ext = next((i for i in base["pkg_exts"] if pkg.endswith(i)), None)
sign_ext = next((i for i in base["sign_exts"] if sign.endswith(i)), None)
if pkg_ext is None:
raise ValueError("Unknown package type")
if sign_ext is None:
raise ValueError("Unknown signature type")
if arch not in base["arch"]:
raise ValueError("Target architecture not found")
pkg_name = name + pkg_ext
sign_name = name + sign_ext
if pkg_name != basename(pkg):
warning("package filename mismatch with metadata")
info("upload as %s" % pkg_name)
try:
UploadFile("%s/%s" % (server, sign_name), sign)
UploadFile("%s/%s" % (server, pkg_name), pkg)
res = post(
"%s/api/update" % server,
json={"arch": arch, "target": pkg_name},
headers={"Content-Type": "application/json"}
)
text = GetReasonText(res)
if res.status_code != 200:
raise RequestException(
"update status not 200: %d (%s)" %
(res.status_code, text)
)
info("upload done")
finally:
delete("%s/%s" % (server, pkg_name))
delete("%s/%s" % (server, sign_name))
def main(args: list) -> int:
prs = ArgumentParser("Renegade Project Arch Linux Repo Uploader")
prs.add_argument("-a", "--arch", help="Target repo architecture", required=False)
prs.add_argument("-d", "--dir", help="Package folder", required=False)
prs.add_argument("-p", "--pkg", help="Package tarball file", required=False)
prs.add_argument("-s", "--sign", help="Package signature file", required=False)
prs.add_argument("-u", "--url", help="Updater Server URL", required=False, default=default_server)
ps = prs.parse_args(args[1:])
basicConfig(level=INFO, stream=stdout)
cnt = 0
if ps.pkg:
UploadPackage(ps.pkg, ps.sign, ps.arch, ps.url)
cnt += 1
elif ps.dir:
exts = [".pkg.tar.gz", ".pkg.tar.xz", ".pkg.tar.zst"]
for f in listdir(ps.dir):
full = join(ps.dir, f)
if any(f.endswith(ext) for ext in exts):
UploadPackage(full, None, ps.arch, ps.url)
cnt += 1
if cnt <= 0:
raise Exception("no packages found")
return 0
if __name__ == '__main__':
exit(main(argv))
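
Example invocations; file and directory names are placeholders, and -u overrides the default updater URL:

# Upload one signed package (the matching .sig next to it is used automatically)
python3 upload-pkg.py -p pkg-0.1-1-aarch64.pkg.tar.xz
# Upload every signed package in ./out to another updater instance as aarch64
python3 upload-pkg.py -d ./out -a aarch64 -u https://repo-updater.example.com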

upload.html (new file, 396 lines)
@@ -0,0 +1,396 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0" />
<meta http-equiv="X-UA-Compatible" content="ie=edge" />
<title>Upload Packages into Repository</title>
<style>
code.code-block {
display: block;
font-family: monospace;
background: black;
color: white;
width: max-content;
padding: 8px 16px 8px 16px;
margin: 8px;
}
table.line-table {
margin: 8px;
}
table.line-table, table.line-table td, th {
border: 1px solid black;
border-collapse: collapse;
padding: 4px;
}
.center-text {
text-align: center;
}
.green-text {
color: lightgreen;
}
</style>
</head>
<body>
<form id="update"><table><tbody>
<tr>
<td>
<label for="arch">Architecture:</label>
</td>
<td>
<select name="arch" id="arch"></select>
</td>
</tr>
<tr>
<td>
<label for="sign">Signature file:</label>
</td>
<td>
<input type="file" name="sign" id="sign" />
</td>
</tr>
<tr>
<td>
<label for="pkg">Package file:</label>
</td>
<td>
<input type="file" name="pkg" id="pkg" />
</td>
</tr>
<tr>
<td>
<label for="folder">Upload folder:</label>
</td>
<td>
<input type="file" name="folder" id="folder" webkitdirectory mozdirectory />
</td>
</tr>
<tr>
<td colspan="2">
<button type="submit" id="add">Add</button>
</td>
</tr>
</tbody></table></form>
<span id="status"></span>
<br/>
<div>
<table id="upload-table" class="line-table">
<thead>
<tr>
<th>Time</th>
<th>Architecture</th>
<th>Name</th>
<th>Size</th>
<th>Status</th>
</tr>
</thead>
<tbody id="upload">
</tbody>
</table>
<button type="button" id="upload-all">Upload all now</button>
</div>
<br/>
<div>
<table id="info-table" class="line-table">
<thead>
<tr>
<th>/</th>
<th>Maximum size</th>
<th>Accept types</th>
</tr>
</thead>
<tbody id="info">
<tr>
<td colspan="3" class="center-text">Loading...</td>
</tr>
</tbody>
</table>
<br/>
<span>Please make sure the package is signed correctly before uploading it</span>
<br/>
<span>
You need to confirm that
<code>PACKAGER</code> and <code>GPGKEY</code>
in <code>makepkg.conf</code> are set correctly
</span>
</div>
<br/>
<div>
<span>Upload via shell:</span>
<code class="code-block" id="shell" language="bash">
<span class="green-text"># Upload signature into cache and verify (must sign with allowed key)</span>
<br/>
curl -X PUT --upload-file pkg-0.1-1-any.pkg.tar.xz.sig {SERVER}/pkg-0.1-1-any.pkg.tar.xz.sig
<br/>
<span class="green-text"># Upload package into cache and verify (must match with signature)</span>
<br/>
curl -X PUT --upload-file pkg-0.1-1-any.pkg.tar.xz {SERVER}/pkg-0.1-1-any.pkg.tar.xz
<br/>
<span class="green-text"># Upload into repo and update database</span>
<br/>
curl -X POST -H 'Content-Type: application/json' --data-raw '{"target":"pkg-0.1-1-any.pkg.tar.xz","arch":"aarch64"}' {SERVER}/api/update
<br/>
<span class="green-text"># Cleanup packages cache</span>
<br/>
curl -X DELETE {SERVER}/pkg-0.1-1-any.pkg.tar.xz
<br/>
curl -X DELETE {SERVER}/pkg-0.1-1-any.pkg.tar.xz.sig
</code>
<pre></pre>
</div>
</body>
<script type="text/javascript">
let config={
max_sign_file:0,
max_pkg_file:0,
sign_exts:[],
pkg_exts:[],
arch:[],
};
let uploading={};
let server=window.location.origin;
const form=document.querySelector("form#update");
const arch=document.querySelector("select#arch");
const info=document.querySelector("tbody#info");
const upload=document.querySelector("tbody#upload");
const sign=document.querySelector("input#sign");
const pkg=document.querySelector("input#pkg");
const folder=document.querySelector("input#folder");
const shell=document.querySelector("code#shell");
const add=document.querySelector("button#add");
const upload_all=document.querySelector("button#upload-all");
function setDisabled(disabled){
arch.disabled=disabled;
sign.disabled=disabled;
pkg.disabled=disabled;
add.disabled=disabled;
folder.disabled=disabled;
upload_all.disabled=disabled;
}
function removeChildren(obj){
while(obj.firstChild)
obj.removeChild(obj.firstChild);
}
function setStatus(val){
const status=document.querySelector("span#status");
if(val&&val.length>0){
status.style.display="block";
status.innerText=val;
}else{
status.style.display="none";
status.innerText="";
}
}
function formatSize(bytes,dp=1){
if(Math.abs(bytes)<1024)return bytes+' B';
const units=['KiB','MiB','GiB','TiB','PiB','EiB','ZiB','YiB'];
let u=-1;
const r=10**dp;
do{
bytes/=1024;u++;
}while(Math.round(Math.abs(bytes)*r)/r>=1024&&u<units.length-1);
return bytes.toFixed(dp)+' '+units[u];
}
function findExt(name,exts){
for(const i in exts)
if(name.endsWith(exts[i]))
return exts[i];
return null;
}
function getFileName(name,exts){
let ext=findExt(name,exts);
if(ext===null)return null;
return name.substring(0,name.length-ext.length);
}
function checkFile(type,obj,max,exts){
if(obj.files.length!==1)throw Error(`Please select ${type} file`);
const file=obj.files[0];
let found=findExt(file.name,exts);
if(found===null)throw Error(`Only ${exts} accepted for ${type}`);
if(file.size>max)throw Error(`File ${type} too big (${file.size} > ${max})`);
return true;
}
async function getReason(res){
let reason=""
if(res.headers.get("Content-Type")==="text/plain")
reason=await res.text();
while(reason.endsWith("\r")||reason.endsWith("\n"))
reason=reason.substring(0,reason.length-1);
return reason.length>0?`(${reason})`:"";
}
async function uploadFile(file){
console.log(`uploading ${file.name}`);
const res=await fetch(`${server}/${file.name}`,{
method:'PUT',
body:await file.arrayBuffer()
});
if(res.status!==201)
throw Error(`status not 201: ${res.status} ${await getReason(res)}`);
console.log(`upload ${file.name} done`);
}
async function deleteFile(file){
console.log(`deleting ${file.name}`);
const res=await fetch(`${server}/${file.name}`,{method:'DELETE'});
if(res.status!==200)
throw Error(`status not 200: ${res.status} ${await getReason(res)}`);
console.log(`delete ${file.name} done`);
}
function addColumn(parent=null,text=null){
let col=document.createElement("td");
if(text!==null)col.innerText=text;
if(parent!==null)parent.appendChild(col);
return col;
}
function loadInfo(){
setDisabled(true);
setStatus("Loading...");
fetch(`${server}/api/info`).then(async res=>{
config=await res.json();
removeChildren(arch);
for(const i in config.arch){
const opt=document.createElement("option");
opt.value=config.arch[i];
opt.innerText=config.arch[i];
arch.appendChild(opt);
}
const addInfoRow=(title,size,exts)=>{
let row=document.createElement("tr");
addColumn(row,title);
addColumn(row,formatSize(size));
addColumn(row,exts);
info.appendChild(row);
return row;
}
removeChildren(info);
addInfoRow("Package",config.max_pkg_file,config.pkg_exts);
addInfoRow("Signature",config.max_sign_file,config.sign_exts);
setDisabled(false);
setStatus(null);
}).catch(err=>{
console.error(err);
setDisabled(true);
setStatus(`Load failed: ${err.message}`);
});
}
async function uploadOne(item){
const set_status=val=>{
item.status_obj.innerText=val;
}
try{
set_status(`Uploading signature file`);
await uploadFile(item.sign_file);
set_status(`Uploading package file`);
await uploadFile(item.pkg_file);
set_status(`Updating database`);
const res=await fetch(`${server}/api/update`,{
method:'POST',
headers:{'Content-Type':'application/json'},
body:JSON.stringify({
target:item.pkg_file.name,
arch:item.arch,
})
});
if(res.status!==200)
throw Error(`status not 200: ${res.status} ${await getReason(res)}`);
set_status(`Upload done`);
item.uploaded=true;
}catch(err){
console.error(err);
set_status(`Upload failed: ${err.message}`);
}
const d1=deleteFile(item.sign_file);
const d2=deleteFile(item.pkg_file);
await Promise.all([d1,d2]);
}
function addOne(name){
if(!(name in uploading))return;
let obj=uploading[name];
if(!("sign_file" in obj)||!("pkg_file" in obj))return;
if(!("date" in obj))obj.date=new Date();
if(!("uploaded" in obj))obj.uploaded=false;
if(!("row_obj" in obj)){
obj.row_obj=document.createElement("tr");
obj.date_obj=addColumn(obj.row_obj,obj.date.toLocaleString());
obj.arch_obj=addColumn(obj.row_obj);
obj.name_obj=addColumn(obj.row_obj,name);
obj.size_obj=addColumn(obj.row_obj);
obj.status_obj=addColumn(obj.row_obj,"Waiting");
obj.row_obj.dataset.obj=obj;
upload.appendChild(obj.row_obj);
}
obj.arch_obj.innerText=obj.arch;
obj.size_obj.innerText=formatSize(obj.pkg_file.size);
}
function doAdd(){
try{
if(!arch.value||arch.value.length<=0)throw Error("No architecture selected");
const have_pkg=pkg.files.length===1;
const have_sign=sign.files.length===1;
const have_folder=folder.files.length>=1;
const have_single=have_pkg||have_sign;
if(have_single){
if(have_pkg&&!have_sign)throw Error("Missing signature");
if(!have_pkg&&have_sign)throw Error("Missing package");
checkFile("signature",sign,config.max_sign_file,config.sign_exts);
checkFile("package",pkg,config.max_pkg_file,config.pkg_exts);
if(sign.files[0].name!==pkg.files[0].name+".sig")
throw Error("Signature mismatch with package file");
const filename=getFileName(pkg.files[0].name,config.pkg_exts);
if(filename===null)throw Error("Bad package filename");
if(!(filename in uploading))uploading[filename]={};
uploading[filename].sign_file=sign.files[0];
uploading[filename].pkg_file=pkg.files[0];
uploading[filename].arch=arch.value;
addOne(filename);
}
if(have_folder)for(let i=0;i<folder.files.length;i++){
let filename;
const file=folder.files[i];
if((filename=getFileName(file.name,config.pkg_exts))!==null){
if(!(filename in uploading))uploading[filename]={};
uploading[filename].pkg_file=file;
uploading[filename].arch=arch.value;
addOne(filename);
}
if((filename=getFileName(file.name,config.sign_exts))!==null){
if(!(filename in uploading))uploading[filename]={};
uploading[filename].sign_file=file;
uploading[filename].arch=arch.value;
addOne(filename);
}
}
if(!have_single&&!have_folder)
throw Error("No files selected");
setStatus(null);
}catch(err){
console.error(err);
setStatus(`Add failed: ${err.message}`);
}
setDisabled(false);
}
async function doUploadAll(){
setDisabled(true);
setStatus("Uploading");
try{
for(const name in uploading){
const item=uploading[name];
if(!item.uploaded)await uploadOne(item);
}
setStatus("Done");
}catch(err){
console.error(err);
setStatus(`Upload failed: ${err.message}`);
}
setDisabled(false);
}
window.addEventListener("load",()=>{
loadInfo();
shell.innerHTML=shell.innerHTML.replaceAll("{SERVER}",server);
form.onsubmit=()=>{
doAdd();
return false;
};
upload_all.addEventListener("click",()=>doUploadAll());
});
</script>
</html>
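
Since packages and databases are published under arch/<architecture>/ in the bucket, a consumer could point pacman at the repository roughly like this; the repository name and domain below are assumptions that depend on the actual deployment:

# Append a repo entry to /etc/pacman.conf (the $arch variable is expanded by pacman itself)
cat >> /etc/pacman.conf <<'EOF'
[myrepo]
SigLevel = Required DatabaseOptional
Server = https://repo.example.com/arch/$arch
EOF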