Compare commits


No commits in common. "cae4e11877278cf61353893ad12d2d0e9f9c985c" and "3290fd171d7d0e6394bb1bfac97b6f8fad4a8055" have entirely different histories.

26 changed files with 213 additions and 431 deletions

6
.gitignore vendored

@@ -1,8 +1,8 @@
/secrets.env
/packages/*/*
!/packages/*/DEBBUILD
!/packages/*/materials
/*/*
!/*/DEBBUILD
!/*/materials
!/script/*
!/makedeb


@@ -1,29 +0,0 @@
steps:
- name: build_all
image: debian-pack:0.4
secrets:
- WEBDAV_USER
- WEBDAV_PASS
- WEBDAV_HOST
- WEBDAV_REPOPATH
commands:
- rm /tmp/apt-file-exists-cache.json
- hook_str="$(realpath ./script/package_exists.py)
--filename {}
--cache-file /tmp/apt-file-exists-cache.json
--package-arch amd64
--apt-base https://dufs.leafee98.com/apt/"
- ./script/build_all.sh
--makedeb-path makedeb/makedeb
--package-exists-hook "$hook_str"
--upload-to-webdav "https://dufs.leafee98.com/apt/income"
--webdav-user "$WEBDAV_USER"
--webdav-pass "$WEBDAV_PASS"
--package packages/apt-repo-updater
--package packages/dufs-bin
--package packages/filebrowser-bin
--package packages/forgejo-bin
--package packages/neovim-bin
--package packages/plik-bin
--package packages/static-deployer-git
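
For reference, the `--package-exists-hook` value is a command template: build_all.sh substitutes the `{}` placeholder with the package filename reported by `makedeb --packagelist` and runs the result, skipping the build when the hook exits 0. A minimal sketch of the substituted command (the resolved script path and the .deb filename below are illustrative, not taken from the diff):

```
# what build_all.sh's run_hook ends up executing, roughly
/repo/script/package_exists.py \
    --filename forgejo-bin_1.19.3-1.deb \
    --cache-file /tmp/apt-file-exists-cache.json \
    --package-arch amd64 \
    --apt-base https://dufs.leafee98.com/apt/
# exit status 0: the package is already in the repository, so the build is skipped
```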

17
.woodpecker/build_bin.yml Normal file

@@ -0,0 +1,17 @@
pipeline:
bin-pack:
image: debian-pack:0.3
secrets:
- WEBDAV_USER
- WEBDAV_PASS
- WEBDAV_HOST
- WEBDAV_REPOPATH
commands:
- ./script/build_single.sh $BIN_PACK
matrix:
BIN_PACK:
- dufs-bin
- filebrowser-bin
- forgejo-bin
- neovim-bin


@@ -0,0 +1,21 @@
pipeline:
#dendrite:
# image: golang
# secrets:
# - WEBDAV_USER
# - WEBDAV_PASS
# - WEBDAV_HOST
# - WEBDAV_REPOPATH
# commands:
# - ./script/build_single.sh dendrite
static-deployer-git:
image: debian-pack:0.3
secrets:
- WEBDAV_USER
- WEBDAV_PASS
- WEBDAV_HOST
- WEBDAV_REPOPATH
commands:
- ./script/build_single.sh static-deployer-git


@@ -1,10 +1,6 @@
FROM debian:bookworm-slim
FROM debian:bullseye-slim
RUN true \
&& apt update \
&& apt install --yes \
curl fakeroot ca-certificates gettext \
git binutils xz-utils bzip2 zstd unzip \
python3 \
&& apt clean \
&& rm -rf /var/lib/apt/lists/*
RUN apt update && \
apt install --yes curl fakeroot ca-certificates gettext git binutils xz-utils bzip2 zstd unzip && \
apt clean && \
rm -rf /var/lib/apt/lists/*
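
The pipelines above reference this image as `debian-pack:0.3` / `debian-pack:0.4`. The diff does not show how the image is published, but a plausible local build, assuming the Dockerfile sits in the current directory, would be:

```
# hypothetical: build and tag the CI image used by the Woodpecker steps
docker build -t debian-pack:0.3 .
```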

15
filebrowser-bin/README.md Normal file

@@ -0,0 +1,15 @@
# FileBrowser
Scripts to build a FileBrowser package from GitHub release binaries
## Usage
```
bash filebrowser.sh v2.22.4
```
The built packages are placed under the `packages` directory.
## Credit
- [FileBrowser](https://github.com/filebrowser/filebrowser)
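
The resulting .deb can then be installed directly; a sketch, assuming the v2.22.4 build from the usage example above and filebrowser.sh's `filebrowser-<version>.deb` naming:

```
sudo dpkg -i packages/filebrowser-v2.22.4.deb
```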


@@ -0,0 +1,90 @@
#!/usr/bin/env bash
version="${1:?You need to specify a version, e.g. v1.22.2}"
version_num="${version##v}"
package_name=filebrowser-${version_num}.tar.gz
# required dirs (must exist)
material_dir="./materials"
# optional dirs (created during build)
download_dir="./var/downloads"
extract_dir="./var/extracted"
rootfs="./var/rootfs"
package_dir="./packages"
function copy_file {
local src="$1"
local dst="$2"
# if dst ends with a slash, treat it as a directory
if [[ "$dst" == */ ]] ; then
mkdir -p $dst
cp --target-directory $dst $src
else
local dst_dir=$(dirname $dst)
mkdir -p $dst_dir
cp --no-target-directory $src $dst
fi
}
function download {
# don't re-download if already downloaded
if [ ! -f "${download_dir}/${package_name}" ] ; then
mkdir -p ${download_dir}
wget --output-document ${download_dir}/${package_name} https://github.com/filebrowser/filebrowser/releases/download/${version}/linux-amd64-filebrowser.tar.gz
fi
}
function extract {
rm -rf ${extract_dir}
mkdir -p ${extract_dir}
tar -xf ${download_dir}/${package_name} -C ${extract_dir}
}
function build_rootfs {
rm -rf ${rootfs}
copy_file ${extract_dir}/filebrowser ${rootfs}/usr/bin/filebrowser
copy_file ${extract_dir}/CHANGELOG.md ${rootfs}/usr/share/filebrowser/
copy_file ${extract_dir}/LICENSE ${rootfs}/usr/share/filebrowser/
copy_file ${extract_dir}/README.md ${rootfs}/usr/share/filebrowser/
copy_file ${material_dir}/filebrowser.service ${rootfs}/usr/lib/systemd/system/filebrowser.service
}
function package_meta {
# debian packages meta file
mkdir -p $rootfs/DEBIAN
sed "s/+++VERSION+++/${version_num}/" > $rootfs/DEBIAN/control << EOF
Package: filebrowser
Version: +++VERSION+++
Priority: optional
Architecture: all
Maintainer: leafee98 <me@leafee98.com>
Description: Web File Browser
EOF
}
function main {
download
extract
build_rootfs
package_meta
mkdir -p $package_dir
dpkg-deb --build --root-owner-group $rootfs $package_dir/filebrowser-${version}.deb
}
main "$@"

@@ -1 +1 @@
Subproject commit 3065562dab1aa1fffc8e0fe3b8885acc93e3c9ad
Subproject commit dec5faa54b0c4204880c79ee8390519a9102e2a5


@@ -1,153 +1,10 @@
#!/usr/bin/env bash
source $(dirname $0)/util.sh
function msg_info {
(( QUIET )) && return
local mesg="$1" ; shift
printf "INFO: $mesg\n" "$@"
}
function msg_warn {
(( QUIET )) && return
local mesg="$1" ; shift
printf "WARN: $mesg\n" "$@"
}
function usage {
echo "$_PROGRAM_NAME:"
echo " --package <dir-name> add a dir to build, allow use it multi times"
echo " --package-dir <dir> all dir in this directory is a package"
echo " --makedeb-path <path> specify the path of makedeb"
echo " --upload-to-webdav https://webdav_host/path/dir"
echo " upload built file to a webdav server"
echo " --webdav-user <username> username of webdav server (if required)"
echo " --webdav-pass <password> password of webdav server (if required)"
echo ' --package-exists-hook "/path/to/hook --args {}"'
echo " this program will run the hook to check"
echo " if specific file already built, the {} will"
echo " be replaced with the final package name,"
echo " exit with 0 means package exists"
}
function upload_to_webdav {
local file="$1"
local webdav_path="$2"
local webdav_user="$3"
local webdav_pass="$4"
if [[ -z "$webdav_user" || -z "$webdav_pass" ]] ; then
curl --show-error --silent \
--upload-file "$file" "${webdav_path}/$file"
else
curl --show-error --silent --user "${webdav_user}:${webdav_pass}" \
--upload-file "$file" "${webdav_path}/$file"
fi
}
function run_hook {
local exe_str="$1"
local place_holder="$2"
local package_name="$3"
local final_exe_str="${exe_str/$place_holder/$package_name}"
echo $final_exe_str
$final_exe_str
}
shopt -s nullglob
set -o functrace
set -o nounset
set -o errexit
set -o errtrace
_PROGRAM_NAME="$0"
_PACKGES_TO_BUILD=()
MAKEDEB_PATH=""
UPLOAD_TO_WEBDAV=""
WEBDAV_USER=""
WEBDAV_PASS=""
PACKAGE_DIR=()
PACKAGE_EXISTS_HOOK=""
QUIET=0
while (( "$#" >= 1 )); do
case "$1" in
--package) _PACKGES_TO_BUILD+=("$2") ; shift ;;
--package-dir) PACKAGE_DIR+=("$2") ; shift ;;
--makedeb-path) MAKEDEB_PATH="$2" ; shift ;;
--upload-to-webdav) UPLOAD_TO_WEBDAV="$2" ; shift ;;
--webdav-user) WEBDAV_USER="$2" ; shift ;;
--webdav-pass) WEBDAV_PASS="$2" ; shift ;;
--package-exists-hook) PACKAGE_EXISTS_HOOK="$2" ; shift ;;
--)
shift
MAKEDEB_ARGS=("$@")
while (( "$#" > 1 )) ; do # leave one arg for shift command at the end of while loop
shift
done
;;
-h|--help) usage ; exit 0 ;;
*) echo "Unknown option $1"
echo "Use $0 --help for help"
exit 1 ;;
esac
shift
for i in $(find . -maxdepth 1 -mindepth 1 -type d -not -path './script' -not -path './makedeb' -not -name '.*')
do
echo === building $i
build_single $i
echo === built $i
done
if [[ -z "${MAKEDEB_PATH}" ]] ; then
MAKEDEB_PATH="${MAKEDEB_PATH:-"$(which makedeb)"}"
else
MAKEDEB_PATH="$(realpath "$MAKEDEB_PATH")"
fi
if [[ -n ${PACKAGE_DIR-} && "${#PACKAGE_DIR}" -gt 0 ]] ; then
for p in "${PACKAGE_DIR[@]}" ; do
for f in "$p"/* ; do
_PACKGES_TO_BUILD+=("$f")
done
done
fi
for package in "${_PACKGES_TO_BUILD[@]}" ; do
msg_info "Package to be built: $package"
done
if [[ "${#_PACKGES_TO_BUILD[@]}" -eq 0 ]] ; then
msg_info "No package to build, exiting..."
fi
for package in "${_PACKGES_TO_BUILD[@]}" ; do
(
msg_info "Start for $package"
cd "$package"
"$MAKEDEB_PATH" --nobuild "${MAKEDEB_ARGS[@]}" && ret=$? || ret=$?
if [[ $ret -ne 0 ]] ; then
msg_warn "Error occurred when running makedeb, skip this package"
exit 4
fi
package_name="$("$MAKEDEB_PATH" --packagelist "${MAKEDEB_ARGS[@]}")"
if [[ -n "${PACKAGE_EXISTS_HOOK}" ]] ; then
ret=0
run_hook "$PACKAGE_EXISTS_HOOK" "{}" "$package_name" || ret="$?"
if [[ $ret -eq 0 ]] ; then
msg_info "Package ${package_name} already built, skip"
exit 0
fi
fi
"$MAKEDEB_PATH" --noextract "${MAKEDEB_ARGS[@]}" && ret=$? || ret=$?
if [[ $ret -ne 0 ]] ; then
msg_warn "Error occurred when running makedeb, skip this package"
exit 4
fi
if [[ -n "${UPLOAD_TO_WEBDAV}" ]] ; then
msg_info "Uploading $package_name to $UPLOAD_TO_WEBDAV"
upload_to_webdav "$package_name" "$UPLOAD_TO_WEBDAV" "$WEBDAV_USER" "$WEBDAV_PASS"
fi
)
done

5
script/build_single.sh Executable file

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
source $(dirname $0)/util.sh
build_single $1


@@ -1,240 +0,0 @@
#!/usr/bin/env python3
import lzma
import gzip
import bz2
import re
import os
import json
import logging
import argparse
import copy
from typing import Tuple, Union
from urllib import request
logger = logging.getLogger("main")
class ExceptionDecompress(Exception):
pass
class IllegalFilename(Exception):
pass
def parse_filename(filename: str) -> Tuple[str, str]:
pattern = re.compile("(?P<name>.*)_(?P<version>.*).deb")
m = pattern.match(filename)
if m is None:
raise IllegalFilename("{} is not like <name>_<version>.deb format".format(filename))
package_name = m.group("name")
package_version = m.group("version")
return package_name, package_version
def decompress_file(data: bytes):
"""Decompress data automatically based on file magic"""
MAGIC_XZ = b'\xfd7zXZ'
MAGIC_GZ = b'\x1f\x8b'
MAGIC_BZ2 = b'BZ'
if data.startswith(MAGIC_XZ):
util = lzma
elif data.startswith(MAGIC_GZ):
util = gzip
elif data.startswith(MAGIC_BZ2):
util = bz2
else:
raise ExceptionDecompress("File doesn't match any supported magic")
return util.decompress(data)
def http_file_exists(url: str):
req = request.Request(url, method="HEAD")
try:
res = request.urlopen(req)
except request.HTTPError:
return False
return res.status == 200
def http_download(url: str) -> bytes:
req = request.Request(url)
res = request.urlopen(req)
return res.read()
def transform_set_to_list_base(x: Union[list, set, dict], reverse: bool):
'''
Transform all set objects in a dict to list objects, or the reverse.
For example, with reverse=True this transforms
{'url': {'package': ['v2', 'v1']}}
into
{'url': {'package': {'v2', 'v1'}}}
'''
if reverse:
if type(x) == list:
return set(x)
else:
if type(x) == set:
return list(x)
if type(x) == dict:
for k in x:
x[k] = transform_set_to_list_base(x[k], reverse)
return x
return x
# make LSP happy
def transform_set_to_list(x: dict, reverse: bool) -> dict:
return dict(transform_set_to_list_base(x, reverse))
class PackagesCache:
"""
Cache packages index, and provide quick check if a package
already exists in that Apt repository
"""
def __init__(self, apt_base_url: str):
"""
:param apt_base_url: initial self.apt_base_url
"""
apt_base_url = apt_base_url.strip()
if apt_base_url.endswith("/"):
apt_base_url = apt_base_url.rstrip("/")
self.apt_base_url = apt_base_url
self.cache = dict() # { url: { package_name: { version } } }
def clear(self):
self.cache = dict()
def load(self, path: str):
with open(path, 'r') as f:
t: dict = json.load(f)
t = transform_set_to_list(t, True)
self.cache = t
def dump(self, path: str):
t = copy.deepcopy(self.cache)
t = transform_set_to_list(t, False)
with open(path, 'w') as f:
json.dump(t, f)
def _build_index(self, url: str, data: bytes):
self._build_index_plain(url, data.decode("utf-8"))
def _build_index_plain(self, url: str, data: str):
content = data.split("\n")
d = dict()
package_name = None
package_version = None
for line in content:
line = line.strip()
if len(line) == 0:
if package_name is not None and package_version is not None:
if not package_name in d.keys():
d[package_name] = { package_version }
else:
d[package_name].add(package_version)
logger.debug("add to cache %s: %s", package_name, package_version)
package_name = None
package_version = None
if line.startswith("Package: "):
package_name = re.sub("Package:[ \t]*", "", line).strip()
if line.startswith("Version: "):
package_version = re.sub("Version:[ \t]*", "", line).strip()
self.cache[url] = d
def _cache_url(self, url: str):
# try different compress format first
for suffix in [ ".xz", ".gz", ".bz2", "" ]:
u = url + suffix
if http_file_exists(u):
d = http_download(u)
if len(suffix) > 0:
self._build_index(url, decompress_file(d))
else:
self._build_index(url, d)
return
raise Exception("No Packages file and its compressed version was found from {}"
.format(url))
def _is_url_cached(self, url: str):
return url in self.cache.keys()
def _construct_url(self, suite: str, component: str, arch: str):
return f"{self.apt_base_url}/dists/{suite}/{component}/binary-{arch}/Packages"
def is_exists(self, package_name: str, package_version: str, package_arch: str,
suite: str="stable", component: str="main"):
"""
:param package_name: "Package" value in debian control
:param package_version: "Version" value in debian control
:param package_arch: "Architecture" value in debian control
:param suite: suite or codename; most of the time "stable" is a symlink
to the actual codename and is a fine default
:param component: should be "main" in most situations
"""
url = self._construct_url(suite, component, package_arch)
if not self._is_url_cached(url):
self._cache_url(url)
return package_name in self.cache[url] \
and package_version in self.cache[url][package_name]
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--apt-base", type=str, required=True,
help="Apt repository's url, shoud contains dists/")
parser.add_argument("--cache-file", type=str, required=False,
help="cache file, will create if not exists")
parser.add_argument("--package-name", type=str, required=False,
help="package name to check")
parser.add_argument("--package-version", type=str, required=False,
help="package version to check")
parser.add_argument("--filename", type=str, required=False,
help="get package name and version from filename")
parser.add_argument("--package-arch", type=str, required=True,
help="package architecture to check")
parser.add_argument("--verbose", type=bool, required=False,
help="show more log")
arg = parser.parse_args()
if not ((arg.package_name and arg.package_version) or arg.filename):
logging.error("You must either specify --filename or (--package-name and --package-version)")
exit(1)
log_level=logging.INFO
if arg.verbose:
log_level=logging.DEBUG
logging.basicConfig(level=log_level, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
p = PackagesCache(arg.apt_base)
if arg.cache_file and os.path.exists(arg.cache_file):
p.load(arg.cache_file)
if arg.filename:
package_name, package_version = parse_filename(arg.filename)
else:
package_name = arg.package_name
package_version = arg.package_version
ext=4
if p.is_exists(package_name, package_version, arg.package_arch):
ext=0
if arg.cache_file:
p.dump(arg.cache_file)
exit(ext)
if __name__ == "__main__":
main()
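
Taken together with the exit codes at the end of main() (0 when the package/version is already published, 4 otherwise), a standalone check looks roughly like the sketch below; the .deb filename is hypothetical and must follow the `<name>_<version>.deb` form that parse_filename expects:

```
python3 script/package_exists.py \
    --apt-base https://dufs.leafee98.com/apt/ \
    --package-arch amd64 \
    --cache-file /tmp/apt-file-exists-cache.json \
    --filename forgejo-bin_1.19.3-1.deb \
    && echo "already in the apt repo" \
    || echo "not published yet"
```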

50
script/util.sh Normal file

@@ -0,0 +1,50 @@
#!/usr/bin/env bash
#
set -o pipefail
set -o errexit
set -o errtrace
WEBDAV_USER=${WEBDAV_USER:?WEBDAV_USER not set}
WEBDAV_PASS=${WEBDAV_PASS:?WEBDAV_PASS not set}
WEBDAV_HOST=${WEBDAV_HOST:?WEBDAV_HOST not set}
WEBDAV_REPOPATH=${WEBDAV_REPOPATH:?WEBDAV_REPOPATH not set}
function curl_alias {
curl --show-error --silent --user "${WEBDAV_USER}:${WEBDAV_PASS}" "$@"
}
function webdav_is_file_exists {
local target="${1%/}"
local target="${target#/}"
local resp=$(curl_alias --head --write-out "%{http_code}" "${WEBDAV_HOST}/${WEBDAV_REPOPATH}/${target}" | tail -n 1)
[[ "${resp}" == "200" ]]
}
function webdav_mkcol {
local dirname="${1#/}"
curl_alias --request MKCOL "${WEBDAV_HOST}/${dirname}"
}
function webdav_upload_file {
echo "uploading file $1 to ${WEBDAV_HOST}/${WEBDAV_REPOPATH}/$2 ..."
curl_alias --silent --upload-file "$1" "${WEBDAV_HOST}/${WEBDAV_REPOPATH}/$2"
echo "file $1 uploaded to ${WEBDAV_HOST}/${WEBDAV_REPOPATH}/$2"
}
function build_single {
(
local package_name="$1"
local package_path="$(realpath "$1")"
cd "$package_path"
local target_filename="$(../makedeb/makedeb -STF | tail -n 1)"
if webdav_is_file_exists "${target_filename}" ; then
echo "${package_name} already built, skipped."
return
fi
../makedeb/makedeb
webdav_upload_file "${target_filename}" "${target_filename}"
)
}
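
util.sh refuses to run unless the four WEBDAV_* variables are set, and build_single calls ../makedeb/makedeb relative to the package directory, so the package must sit next to the makedeb submodule at the repository root. A local invocation therefore looks roughly like this; the credential values and package name are placeholders:

```
# hypothetical local run from the repository root
export WEBDAV_USER=user WEBDAV_PASS=secret
export WEBDAV_HOST=https://dufs.leafee98.com WEBDAV_REPOPATH=apt/income
./script/build_single.sh forgejo-bin
```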