Refactor project structure
- Refactor build script
- Add python script to check if package exists in apt repo
- Refactor woodpecker-ci config
- Update makedeb
parent f7bc1f15a3
commit 0efef82af8
23  .woodpecker/build.yml  (new file)
@@ -0,0 +1,23 @@
steps:
  - name: build_all
    image: debian-pack:0.4
    secrets:
      - WEBDAV_USER
      - WEBDAV_PASS
      - WEBDAV_HOST
      - WEBDAV_REPOPATH
    commands:
      - rm /tmp/apt-file-exists-cache.json
      - hook_str="$(realpath ./script/package_exists.py)
          --filename {}
          --cache-file /tmp/apt-file-exists-cache.json
          --package-arch amd64
          --apt-base https://dufs.leafee98.com/apt/"
      - ./script/build_all.sh
          --package packages/dufs-bin
          --makedeb-path makedeb/makedeb
          --package-exists-hook "$hook_str"
          --upload-to-webdav "https://dufs.leafee98.com/apt/income"
          --webdav-user "$WEBDAV_USER"
          --webdav-pass "$WEBDAV_PASS"
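At build time, build_all.sh substitutes the package file name for the {} placeholder before running the hook, so the hook call above expands to roughly the following (the .deb file name here is only illustrative, not taken from the commit):

    ./script/package_exists.py \
        --filename dufs-bin_0.0.0-1_amd64.deb \
        --cache-file /tmp/apt-file-exists-cache.json \
        --package-arch amd64 \
        --apt-base https://dufs.leafee98.com/apt/
    # exit status 0: the package is already published, so build_all.sh skips the build
    # any other exit status: the package is missing and the build proceeds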
@@ -1,17 +0,0 @@
pipeline:
  bin-pack:
    image: debian-pack:0.3
    secrets:
      - WEBDAV_USER
      - WEBDAV_PASS
      - WEBDAV_HOST
      - WEBDAV_REPOPATH
    commands:
      - ./script/build_single.sh $BIN_PACK

matrix:
  BIN_PACK:
    - dufs-bin
    - filebrowser-bin
    - forgejo-bin
    - neovim-bin
@@ -1,21 +0,0 @@
pipeline:
  #dendrite:
  #  image: golang
  #  secrets:
  #    - WEBDAV_USER
  #    - WEBDAV_PASS
  #    - WEBDAV_HOST
  #    - WEBDAV_REPOPATH
  #  commands:
  #    - ./script/build_single.sh dendrite

  static-deployer-git:
    image: debian-pack:0.3
    secrets:
      - WEBDAV_USER
      - WEBDAV_PASS
      - WEBDAV_HOST
      - WEBDAV_REPOPATH
    commands:
      - ./script/build_single.sh static-deployer-git
@@ -1,6 +1,10 @@
FROM debian:bullseye-slim
FROM debian:bookworm-slim

RUN apt update && \
    apt install --yes curl fakeroot ca-certificates gettext git binutils xz-utils bzip2 zstd unzip && \
    apt clean && \
    rm -rf /var/lib/apt/lists/*
RUN true \
    && apt update \
    && apt install --yes \
        curl fakeroot ca-certificates gettext \
        git binutils xz-utils bzip2 zstd unzip \
        python3 \
    && apt clean \
    && rm -rf /var/lib/apt/lists/*
2  makedeb
@@ -1 +1 @@
Subproject commit dec5faa54b0c4204880c79ee8390519a9102e2a5
Subproject commit 3065562dab1aa1fffc8e0fe3b8885acc93e3c9ad
@ -1,10 +1,153 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
source $(dirname $0)/util.sh
|
||||
|
||||
for i in $(find . -maxdepth 1 -mindepth 1 -type d -not -path './script' -not -path './makedeb' -not -name '.*')
|
||||
do
|
||||
echo === building $i
|
||||
build_single $i
|
||||
echo === built $i
|
||||
function msg_info {
|
||||
(( QUIET )) && return
|
||||
local mesg="$1" ; shift
|
||||
printf "INFO: $mesg\n" "$@"
|
||||
}
|
||||
function msg_warn {
|
||||
(( QUIET )) && return
|
||||
local mesg="$1" ; shift
|
||||
printf "WARN: $mesg\n" "$@"
|
||||
}
|
||||
|
||||
function usage {
|
||||
echo "$_PROGRAM_NAME:"
|
||||
echo " --package <dir-name> add a dir to build, allow use it multi times"
|
||||
echo " --package-dir <dir> all dir in this directory is a package"
|
||||
echo " --makedeb-path <path> specify the path of makedeb"
|
||||
echo " --upload-to-webdav https://webdav_host/path/dir"
|
||||
echo " upload built file to a webdav server"
|
||||
echo " --webdav-user <username> username of webdav server (if required)"
|
||||
echo " --webdav-pass <password> password of webdav server (if required)"
|
||||
echo ' --package-exists-hook "/path/to/hook --args {}"'
|
||||
echo " this program will run the hook to check"
|
||||
echo " if specific file already built, the {} will"
|
||||
echo " be replaced with the final package name,"
|
||||
echo " exit with 0 means package exists"
|
||||
}
|
||||
|
||||
function upload_to_webdav {
|
||||
local file="$1"
|
||||
local webdav_path="$2"
|
||||
local webdav_user="$3"
|
||||
local webdav_pass="$4"
|
||||
|
||||
if [[ -z "$webdav_user" || -z "$webdav_pass" ]] ; then
|
||||
curl --show-error --silent \
|
||||
--upload-file "$file" "${webdav_path}/$file"
|
||||
else
|
||||
curl --show-error --silent --user "${webdav_user}:${webdav_pass}" \
|
||||
--upload-file "$file" "${webdav_path}/$file"
|
||||
fi
|
||||
}
|
||||
function run_hook {
|
||||
local exe_str="$1"
|
||||
local place_holder="$2"
|
||||
local package_name="$3"
|
||||
local final_exe_str="${exe_str/$place_holder/$package_name}"
|
||||
echo $final_exe_str
|
||||
|
||||
$final_exe_str
|
||||
}
|
||||
|
||||
shopt -s nullglob
|
||||
set -o functrace
|
||||
set -o nounset
|
||||
set -o errexit
|
||||
set -o errtrace
|
||||
|
||||
_PROGRAM_NAME="$0"
|
||||
_PACKGES_TO_BUILD=()
|
||||
MAKEDEB_PATH=""
|
||||
UPLOAD_TO_WEBDAV=""
|
||||
WEBDAV_USER=""
|
||||
WEBDAV_PASS=""
|
||||
PACKAGE_DIR=()
|
||||
PACKAGE_EXISTS_HOOK=""
|
||||
QUIET=0
|
||||
|
||||
while (( "$#" >= 1 )); do
|
||||
case "$1" in
|
||||
--package) _PACKGES_TO_BUILD+=("$2") ; shift ;;
|
||||
--package-dir) PACKAGE_DIR+=("$2") ; shift ;;
|
||||
--makedeb-path) MAKEDEB_PATH="$2" ; shift ;;
|
||||
--upload-to-webdav) UPLOAD_TO_WEBDAV="$2" ; shift ;;
|
||||
--webdav-user) WEBDAV_USER="$2" ; shift ;;
|
||||
--webdav-pass) WEBDAV_PASS="$2" ; shift ;;
|
||||
--package-exists-hook) PACKAGE_EXISTS_HOOK="$2" ; shift ;;
|
||||
--)
|
||||
shift
|
||||
MAKEDEB_ARGS=("$@")
|
||||
while (( "$#" > 1 )) ; do # leave one arg for shift command at the end of while loop
|
||||
shift
|
||||
done
|
||||
;;
|
||||
-h|--help) usage ; exit 0 ;;
|
||||
*) echo "Unkown option $1"
|
||||
echo "Use $0 --help for help"
|
||||
exit 1 ;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
if [[ -z "${MAKEDEB_PATH}" ]] ; then
|
||||
MAKEDEB_PATH="${MAKEDEB_PATH:-"$(which makedeb)"}"
|
||||
else
|
||||
MAKEDEB_PATH="$(realpath "$MAKEDEB_PATH")"
|
||||
fi
|
||||
|
||||
if [[ -n ${PACKAGE_DIR-} && "${#PACKAGE_DIR}" -gt 0 ]] ; then
|
||||
for p in "${PACKAGE_DIR[@]}" ; do
|
||||
for f in "$p"/* ; do
|
||||
_PACKGES_TO_BUILD+=("$f")
|
||||
done
|
||||
done
|
||||
fi
|
||||
|
||||
for package in "${_PACKGES_TO_BUILD[@]}" ; do
|
||||
msg_info "Package to be built: $package"
|
||||
done
|
||||
if [[ "${#_PACKGES_TO_BUILD[@]}" -eq 0 ]] ; then
|
||||
msg_info "No package to build, exiting..."
|
||||
fi
|
||||
|
||||
for package in "${_PACKGES_TO_BUILD[@]}" ; do
|
||||
(
|
||||
msg_info "Start for $package"
|
||||
cd "$package"
|
||||
|
||||
"$MAKEDEB_PATH" --nobuild "${MAKEDEB_ARGS[@]}" && ret=$? || ret=$?
|
||||
if [[ $ret -ne 0 ]] ; then
|
||||
msg_warn "Error occurred when running makedeb, skip this package"
|
||||
exit 4
|
||||
fi
|
||||
|
||||
package_name="$("$MAKEDEB_PATH" --packagelist "${MAKEDEB_ARGS[@]}")"
|
||||
|
||||
if [[ -n "${PACKAGE_EXISTS_HOOK}" ]] ; then
|
||||
ret=0
|
||||
run_hook "$PACKAGE_EXISTS_HOOK" "{}" "$package_name" || ret="$?"
|
||||
|
||||
if [[ $ret -eq 0 ]] ; then
|
||||
msg_info "Package ${package_name} already built, skip"
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
|
||||
"$MAKEDEB_PATH" --noextract "${MAKEDEB_ARGS[@]}" && ret=$? || ret=$?
|
||||
if [[ $ret -ne 0 ]] ; then
|
||||
msg_warn "Error occurred when running makedeb, skip this package"
|
||||
exit 4
|
||||
fi
|
||||
|
||||
if [[ -n "${UPLOAD_TO_WEBDAV}" ]] ; then
|
||||
msg_info "Uploading $package_name to $UPLOAD_TO_WEBDAV"
|
||||
upload_to_webdav "$package_name" "$UPLOAD_TO_WEBDAV" "$WEBDAV_USER" "$WEBDAV_PASS"
|
||||
fi
|
||||
)
|
||||
|
||||
done
|
||||
|
||||
|
||||
|
|
|
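The placeholder handling in run_hook is plain bash pattern substitution; a minimal sketch with made-up values (the package file name is hypothetical, not from this repo):

    exe_str='./script/package_exists.py --filename {} --package-arch amd64'
    place_holder='{}'
    package_name='neovim-bin_0.9.5-1_amd64.deb'   # hypothetical package file name
    final_exe_str="${exe_str/$place_holder/$package_name}"
    echo "$final_exe_str"
    # ./script/package_exists.py --filename neovim-bin_0.9.5-1_amd64.deb --package-arch amd64
    $final_exe_str   # exit 0 here means "already built", so build_all.sh skips the package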
@@ -1,5 +0,0 @@
#!/usr/bin/env bash

source $(dirname $0)/util.sh

build_single $1
240  script/package_exists.py  (new executable file)
@@ -0,0 +1,240 @@
#!/usr/bin/env python3

import lzma
import gzip
import bz2
import re
import os
import json
import logging
import argparse
import copy
from typing import Tuple, Union
from urllib import request

logger = logging.getLogger("main")

class ExceptionDecompress(Exception):
    pass

class IllegalFilename(Exception):
    pass


def parse_filename(filename: str) -> Tuple[str, str]:
    pattern = re.compile("(?P<name>.*)_(?P<version>.*).deb")
    m = pattern.match(filename)
    if m is None:
        raise IllegalFilename("{} is not like <name>_<version>.deb format".format(filename))

    package_name = m.group("name")
    package_version = m.group("version")

    return package_name, package_version

def decompress_file(data: bytes):
    """Decompress data automatically based on file magic"""
    MAGIC_XZ = b'\xfd7zXZ'
    MAGIC_GZ = b'\x1f\x8b'
    MAGIC_BZ2 = b'BZ'

    if data.startswith(MAGIC_XZ):
        util = lzma
    elif data.startswith(MAGIC_GZ):
        util = gzip
    elif data.startswith(MAGIC_BZ2):
        util = bz2
    else:
        raise ExceptionDecompress("File doesn't match any supported magic")
    return util.decompress(data)

def http_file_exists(url: str):
    req = request.Request(url, method="HEAD")
    try:
        res = request.urlopen(req)
    except request.HTTPError:
        return False

    return res.status == 200

def http_download(url: str) -> bytes:
    req = request.Request(url)
    res = request.urlopen(req)
    return res.read()


def transform_set_to_list_base(x: Union[list, set, dict], reverse: bool):
    '''
    Transform all set objects in dict to list objects, or reverse.

    For example, transform
        {'url': {'package': ['v2', 'v1']}}
    to
        {'url': {'package': {'v2', 'v1'}}}
    '''
    if reverse:
        if type(x) == list:
            return set(x)
    else:
        if type(x) == set:
            return list(x)

    if type(x) == dict:
        for k in x:
            x[k] = transform_set_to_list_base(x[k], reverse)
        return x

    return x

# make LSP happy
def transform_set_to_list(x: dict, reverse: bool) -> dict:
    return dict(transform_set_to_list_base(x, reverse))


class PackagesCache:
    """
    Cache packages index, and provide quick check if a package
    already exists in that Apt repository
    """

    def __init__(self, apt_base_url: str):
        """
        :param apt_base_url: initial self.apt_base_url
        """
        apt_base_url = apt_base_url.strip()
        if apt_base_url.endswith("/"):
            apt_base_url = apt_base_url.rstrip("/")
        self.apt_base_url = apt_base_url
        self.cache = dict()  # { url: { package_name: { version } } }

    def clear(self):
        self.cache = dict()

    def load(self, path: str):
        with open(path, 'r') as f:
            t: dict = json.load(f)
        t = transform_set_to_list(t, True)
        self.cache = t

    def dump(self, path: str):
        t = copy.deepcopy(self.cache)
        t = transform_set_to_list(t, False)
        with open(path, 'w') as f:
            json.dump(t, f)

    def _build_index(self, url: str, data: bytes):
        self._build_index_plain(url, data.decode("utf-8"))

    def _build_index_plain(self, url: str, data: str):
        content = data.split("\n")

        d = dict()
        package_name = None
        package_version = None
        for line in content:
            line = line.strip()
            if len(line) == 0:
                if package_name is not None and package_version is not None:
                    if not package_name in d.keys():
                        d[package_name] = { package_version }
                    else:
                        d[package_name].add(package_version)
                    logger.debug("add to cache %s: %s", package_name, package_version)
                package_name = None
                package_version = None

            if line.startswith("Package: "):
                package_name = re.sub("Package:[ \t]*", "", line).strip()
            if line.startswith("Version: "):
                package_version = re.sub("Version:[ \t]*", "", line).strip()

        self.cache[url] = d

    def _cache_url(self, url: str):
        # try different compress format first
        for suffix in [ ".xz", ".gz", ".bz2", "" ]:
            u = url + suffix
            if http_file_exists(u):
                d = http_download(u)
                if len(suffix) > 0:
                    self._build_index(url, decompress_file(d))
                else:
                    self._build_index(url, d)
                return

        raise Exception("No Packages file and its compressed version was found from {}"
                        .format(url))

    def _is_url_cached(self, url: str):
        return url in self.cache.keys()

    def _construct_url(self, suite: str, component: str, arch: str):
        return f"{self.apt_base_url}/dists/{suite}/{component}/binary-{arch}/Packages"

    def is_exists(self, package_name: str, package_version: str, package_arch: str,
                  suite: str="stable", component: str="main"):
        """
        :param package_name: "Package" value in debian control
        :param package_version: "Version" value in debian control
        :param package_arch: "Architecture" value in debian control
        :param suite: suite or codename, most time "stable" is a symlink to
                      actual codename and be fine for default
        :param component: should be "main" in most situations
        """
        url = self._construct_url(suite, component, package_arch)
        if not self._is_url_cached(url):
            self._cache_url(url)

        return package_name in self.cache[url] \
            and package_version in self.cache[url][package_name]

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--apt-base", type=str, required=True,
                        help="Apt repository's url, shoud contains dists/")
    parser.add_argument("--cache-file", type=str, required=False,
                        help="cache file, will create if not exists")
    parser.add_argument("--package-name", type=str, required=False,
                        help="package name to check")
    parser.add_argument("--package-version", type=str, required=False,
                        help="package version to check")
    parser.add_argument("--filename", type=str, required=False,
                        help="get package name and version from filename")
    parser.add_argument("--package-arch", type=str, required=True,
                        help="package architecture to check")
    parser.add_argument("--verbose", type=bool, required=False,
                        help="show more log")
    arg = parser.parse_args()

    if not ((arg.package_name and arg.package_version) or arg.filename):
        logging.error("You must either specify --filename or (--package-name and --package-version)")
        exit(1)

    log_level=logging.INFO
    if arg.verbose:
        log_level=logging.DEBUG
    logging.basicConfig(level=log_level, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")


    p = PackagesCache(arg.apt_base)

    if arg.cache_file and os.path.exists(arg.cache_file):
        p.load(arg.cache_file)

    if arg.filename:
        package_name, package_version = parse_filename(arg.filename)
    else:
        package_name = arg.package_name
        package_version = arg.package_version

    ext=4
    if p.is_exists(package_name, package_version, arg.package_arch):
        ext=0

    if arg.cache_file:
        p.dump(arg.cache_file)

    exit(ext)

if __name__ == "__main__":
    main()
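The script can also be exercised by hand; a usage sketch with a placeholder package name and version (exit status 0 means that package/version is already published in the apt repo, 4 means it is not, 1 means the arguments were incomplete):

    ./script/package_exists.py \
        --apt-base https://dufs.leafee98.com/apt/ \
        --package-name dufs-bin \
        --package-version 0.0.0-1 \
        --package-arch amd64 \
        --cache-file /tmp/apt-file-exists-cache.json
    echo "exit status: $?"   # 0 = already exists, 4 = not found, 1 = bad arguments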
@@ -1,50 +0,0 @@
#!/usr/bin/evn bash
#
set -o pipefail
set -o errexit
set -o errtrace

WEBDAV_USER=${WEBDAV_USER:?WEBDAV_USER not set}
WEBDAV_PASS=${WEBDAV_PASS:?WEBDAV_PASS not set}
WEBDAV_HOST=${WEBDAV_HOST:?WEBDAV_HOST not set}
WEBDAV_REPOPATH=${WEBDAV_REPOPATH:?WEBDAV_REPOPATH not set}

function curl_alias {
    curl --show-error --silent --user "${WEBDAV_USER}:${WEBDAV_PASS}" "$@"
}

function webdav_is_file_exists {
    local target="${1%/}"
    local target="${target#/}"
    local resp=$(curl_alias --head --write-out "%{http_code}" "${WEBDAV_HOST}/${WEBDAV_REPOPATH}/${target}" | tail -n 1)
    [[ "${resp}" == "200" ]]
}

function webdav_mkcol {
    local dirname="${1#/}"
    curl_alias --request MKCOL "${WEBDAV_HOST}/${dirname}"
}

function webdav_upload_file {
    echo "uploading file $1 to ${WEBDAV_HOST}/${WEBDAV_REPOPATH}/$2 ..."
    curl_alias --silent --upload-file "$1" "${WEBDAV_HOST}/${WEBDAV_REPOPATH}/$2"
    echo "file $1 uploaded to ${WEBDAV_HOST}/${WEBDAV_REPOPATH}/$2"
}

function build_single {
    (
        local package_name="$1"
        local package_path="$(realpath "$1")"
        cd "$package_path"
        local target_filename="$(../makedeb/makedeb -STF | tail -n 1)"

        if webdav_is_file_exists "${target_filename}" ; then
            echo "${package_name} already built, skipped."
            return
        fi

        ../makedeb/makedeb

        webdav_upload_file "${target_filename}" "${target_filename}"
    )
}