Compare commits

..

4 Commits

Author SHA1 Message Date
dcf20c7d73 style: update merge-apt-repo.py 2025-10-11 17:04:44 +08:00
a0b6e71f9a add yesplaymusic 2025-10-11 17:04:44 +08:00
9f0e777b8c update wps-office description 2025-10-11 17:04:44 +08:00
244baa9b43 revert: remove debiancn repo, restore wps-office 2025-10-11 17:04:44 +08:00
5 changed files with 75 additions and 63 deletions

View File

@ -31,6 +31,7 @@
| [QQ音乐](https://y.qq.com/download/download.html) | qqmusic | ✅ | | | [QQ音乐](https://y.qq.com/download/download.html) | qqmusic | ✅ | |
| [腾讯会议](https://meeting.tencent.com/download/) | wemeet | ✅ | ✅ | | [腾讯会议](https://meeting.tencent.com/download/) | wemeet | ✅ | ✅ |
| [腾讯文档](https://docs.qq.com/home/download) | tdappdesktop | ✅ | ✅ | | [腾讯文档](https://docs.qq.com/home/download) | tdappdesktop | ✅ | ✅ |
| [WPS Office](https://linux.wps.cn/) | wps-office | ✅ | |
| [百度网盘](https://pan.baidu.com/download) | baidunetdisk | ✅ | | | [百度网盘](https://pan.baidu.com/download) | baidunetdisk | ✅ | |
| [钉钉](https://www.dingtalk.com/download/) | com.alibabainc.dingtalk | ✅ | ✅ | | [钉钉](https://www.dingtalk.com/download/) | com.alibabainc.dingtalk | ✅ | ✅ |
| [飞书](https://www.feishu.cn/download) | bytedance-feishu-stable | ✅ | ✅ | | [飞书](https://www.feishu.cn/download) | bytedance-feishu-stable | ✅ | ✅ |
@ -110,7 +111,6 @@
| [lazydocker: wcbing 打包](https://github.com/wcbing-build/lazydocker-debs) | lazydocker | ✅ | ✅ | | [lazydocker: wcbing 打包](https://github.com/wcbing-build/lazydocker-debs) | lazydocker | ✅ | ✅ |
| [lazygit: wcbing 打包](https://github.com/wcbing-build/lazygit-debs) | lazygit | ✅ | ✅ | | [lazygit: wcbing 打包](https://github.com/wcbing-build/lazygit-debs) | lazygit | ✅ | ✅ |
| [NextTrace](https://github.com/nxtrace/nexttrace-debs) | nexttrace | ✅ | ✅ | | [NextTrace](https://github.com/nxtrace/nexttrace-debs) | nexttrace | ✅ | ✅ |
| [Debian 中文社区软件源](https://github.com/debiancn/repo)[镜像](https://help.mirrors.cernet.edu.cn/debiancn/) | anydesk<br />marktext<br />wps-office<br />[更多](https://github.com/debiancn/repo) | ✅ | |
| [Gitea](https://gitlab.com/packaging/gitea)[镜像](https://mirrors.ustc.edu.cn/help/packaging-gitea.html) | gitea | ✅ | ✅ | | [Gitea](https://gitlab.com/packaging/gitea)[镜像](https://mirrors.ustc.edu.cn/help/packaging-gitea.html) | gitea | ✅ | ✅ |
| [AnyDesk](https://deb.anydesk.com/howto.html) | anydesk | ✅ | ✅ | | [AnyDesk](https://deb.anydesk.com/howto.html) | anydesk | ✅ | ✅ |
| [Spotify](https://www.spotify.com/sg-zh/download/linux/) | spotify-client | ✅ | | | [Spotify](https://www.spotify.com/sg-zh/download/linux/) | spotify-client | ✅ | |

View File

@ -117,12 +117,6 @@
"mix": "Packages" "mix": "Packages"
} }
}, },
"debiancn": {
"repo": "https://mirrors.cernet.edu.cn/debiancn/",
"path": {
"amd64": "dists/bookworm/main/binary-amd64/Packages.gz"
}
},
"gitea": { "gitea": {
"repo": "https://mirrors.ustc.edu.cn/packaging-gitea/", "repo": "https://mirrors.ustc.edu.cn/packaging-gitea/",
"path": { "path": {

31
get/wps-office.sh Normal file
View File

@ -0,0 +1,31 @@
# Fetch the latest WPS Office for Linux version and amd64 .deb URL from the
# official site and hand them to check_downloader.py.
#
# The official site signs download links client-side; its JS is roughly:
#   function downLoad(url) {
#       var uri = new URL(url).pathname;
#       var secrityKey = "7f8faaaa468174dc1c9cd62e5f218a5b";
#       var timestamp10 = Math.floor(new Date().getTime() / 1000);
#       var md5hash = CryptoJS.MD5(secrityKey + uri + timestamp10);
#       url += '?t=' + timestamp10 + '&k=' + md5hash;
#   }
# We do NOT sign the link here. A Cloudflare Worker re-implements the logic
# above and redirects to a freshly signed official URL, so the fixed link
# stored in Packages keeps working and the download traffic is served by the
# vendor's CDN, not this repo.

# -f: fail (empty output) on HTTP errors; -s: no progress noise.
WEB_CONTENT=$(curl -fs https://linux.wps.cn/)
if [ -z "$WEB_CONTENT" ]; then
    echo "get/wps-office.sh: failed to fetch https://linux.wps.cn/" >&2
    exit 1
fi

# Version is rendered as <p class="banner_txt">x.y.z</p> on the landing page.
# Expansions are quoted to avoid word splitting / glob expansion on HTML text.
VERSION=$(echo "$WEB_CONTENT" | grep -o "<p class=\"banner_txt\">[0-9.]*</p>" | sed 's/<p class=\"banner_txt\">\(.*\)<\/p>/\1/')
# First amd64 .deb link on the page is the unsigned download URL.
AMD64_ORI_URL=$(echo "$WEB_CONTENT" | grep -o "https://[0-9a-zA-Z_\/\.\-]*amd64\.deb" | head -n 1)
# Route through the Cloudflare Worker that signs the URL on the fly.
AMD64_URL="https://wps302.wcbing.workers.dev/$AMD64_ORI_URL"

./check_downloader.py wps-office "$VERSION" "$AMD64_URL" amd64

View File

@ -1,27 +0,0 @@
# Sign a WPS download URL: append the ?t=<timestamp>&k=<md5> query parameters
# that the WPS CDN requires before it will serve the file.
decrypt() {
url=$1
# pathname: everything after the host, restored with a leading "/"
# (fields 1-3 of a https:// URL split on "/" are "https:", "", host).
pathname="/$(echo $url | cut -d '/' -f 4-)"
secrity_key="7f8faaaa468174dc1c9cd62e5f218a5b"
# 10-digit unix timestamp (seconds).
timestamp10=$(date '+%s')
# k = md5(key + pathname + timestamp), matching the site's JS below.
md5hash=$(echo -n "${secrity_key}${pathname}${timestamp10}" | md5sum | cut -d " " -f 1 )
url="$url?t=${timestamp10}&k=${md5hash}"
echo $url
# Reference: the official site's JS implements the same signing:
# function downLoad(url) {
# var urlObj=new URL(url);
# var uri=urlObj.pathname;
# var secrityKey="7f8faaaa468174dc1c9cd62e5f218a5b";
# var timestamp10=Math.floor(new Date().getTime() / 1000);
# var md5hash=CryptoJS.MD5(secrityKey + uri + timestamp10);
# url += '?t=' + timestamp10 + '&k=' + md5hash
# console.log(url);
# }
}
# Scrape the landing page for the current version and the first amd64 .deb
# link, sign the link, and hand both to check_downloader.py.
WEB_CONTENT=$(curl -fs https://linux.wps.cn/)
VERSION=$(echo $WEB_CONTENT | grep -o "<p class=\"banner_txt\">[0-9.]*</p>" | sed 's/<p class=\"banner_txt\">\(.*\)<\/p>/\1/')
X64_ORI_URL=$(echo $WEB_CONTENT | grep -o "https://[0-9a-zA-Z_\/\.\-]*amd64\.deb" | head -n 1)
X64_URL=$(decrypt $X64_ORI_URL)
./check_downloader.py wps-office $VERSION $X64_URL

View File

@ -13,27 +13,36 @@ import sys
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor
from threading import Lock from threading import Lock
import apt_pkg import apt_pkg
from apt_pkg import version_compare
apt_pkg.init() # 初始化 apt_pkg apt_pkg.init() # 初始化 apt_pkg
package_version = {arch: {} for arch in ["all", "amd64", "i386", "arm64"]} USER_AGENT = "Debian APT-HTTP/1.3 (3.0.3)" # from Debian 13
package_info = {arch: {} for arch in ["all", "amd64", "i386", "arm64"]} arch_List = ["all", "amd64", "arm64", "i386"]
lock = {arch: Lock() for arch in ["all", "amd64", "i386", "arm64"]} lock = {arch: Lock() for arch in arch_List}
packages = {arch: {} for arch in arch_List} # 存放用于生成 Packages 的内容
USER_AGENT = "Debian APT-HTTP/1.3 (2.6.1)" # from Debian 12 """ packages format:
{
""" "arch": {
repo info json format: "package1": {
"repo_name": { "version": "1.0.0",
"repo": repo url, end with "/" "package": ""
"xxx_path": { }
"arch": repo Packages file path of "arch", start with no "/"
} }
} }
""" """
def read_repo_list(repo_list_file: str) -> dict: def read_repo_list(repo_list_file: str) -> dict:
"""
repo info json format:
"repo_name": {
"repo": repo url, end with "/" is better
"path": {
"arch": repo Packages file path of "arch", don't start with "/"
}
}
"""
try: try:
with open(repo_list_file, "r") as f: with open(repo_list_file, "r") as f:
return json.load(f) return json.load(f)
@ -46,7 +55,7 @@ def get_remote_packages(repo_url: str, file_path: str) -> bytes:
""" """
get the packages file content from remote repo get the packages file content from remote repo
""" """
file_url = repo_url + file_path file_url = os.path.join(repo_url, file_path)
try: try:
response = requests.get( response = requests.get(
file_url, timeout=10, headers={"User-Agent": USER_AGENT} file_url, timeout=10, headers={"User-Agent": USER_AGENT}
@ -77,28 +86,33 @@ def get_remote_packages(repo_url: str, file_path: str) -> bytes:
return b"" return b""
def get_latest(deb_packages: bytes): def split_latest(packages_file_content: bytes):
""" """
split the information of each packet, deduplication and store the latest in infoList split the information of each packet, deduplication and store the latest in infoList
将每个包的信息分割开去重并将最新的存放到 infoList 将每个包的信息分割开去重并将最新的存放到 infoList
""" """
deb_packages = re.sub(rb"^Package: ", b"{{start}}Package: ", deb_packages, flags=re.MULTILINE) packages_file_content = re.sub(
info_list = deb_packages.split(b"{{start}}")[1:] rb"^Package: ", b"{{start}}Package: ", packages_file_content, flags=re.MULTILINE
)
package_list = packages_file_content.split(b"{{start}}")[1:]
find_name = re.compile(rb"Package: (.+)") find_name = re.compile(rb"Package: (.+)")
find_arch = re.compile(rb"Architecture: (.+)") find_arch = re.compile(rb"Architecture: (.+)")
find_version = re.compile(rb"Version: (.+)") find_version = re.compile(rb"Version: (.+)")
for v in info_list: for package in package_list:
name = "unknown"
try: try:
name = find_name.search(v).group(1).decode() name = find_name.search(package).group(1).decode()
arch = find_arch.search(v).group(1).decode() arch = find_arch.search(package).group(1).decode()
tmp_version = find_version.search(v).group(1).decode() tmp_version = find_version.search(package).group(1).decode()
with lock[arch]: with lock[arch]:
# 使用 apt_pkg 进行版本比较 # 使用 apt_pkg 进行版本比较
if name not in package_version[arch] or apt_pkg.version_compare(tmp_version, package_version[arch][name]) > 0: if (
package_version[arch][name] = tmp_version name not in packages[arch]
package_info[arch][name] = v or version_compare(tmp_version, packages[arch][name]["version"]) > 0
):
packages[arch][name] = {"package": package, "version": tmp_version}
except Exception as e: except Exception as e:
logging.error(f"Error processing package {name}: {e}") logging.error(f"Error processing package {name}: {e}")
return return
@ -110,7 +124,7 @@ def process_repo(r: dict):
""" """
try: try:
for path in r["path"].values(): for path in r["path"].values():
get_latest(get_remote_packages(r["repo"], path)) split_latest(get_remote_packages(r["repo"], path))
except Exception as e: except Exception as e:
logging.error(f"Error processing repo {r.get('name', 'unknown')}: {e}") logging.error(f"Error processing repo {r.get('name', 'unknown')}: {e}")
@ -136,7 +150,7 @@ if __name__ == "__main__":
# 处理本地 repo # 处理本地 repo
if args.local: if args.local:
with open(args.local) as f: with open(args.local) as f:
get_latest(f.read().encode()) split_latest(f.read().encode())
# 读取 repo_list 配置 # 读取 repo_list 配置
repo_list = read_repo_list(args.repo) repo_list = read_repo_list(args.repo)
@ -151,7 +165,7 @@ if __name__ == "__main__":
for arch in ["amd64", "arm64"]: for arch in ["amd64", "arm64"]:
os.makedirs(f"deb/dists/wcbing/main/binary-{arch}/", exist_ok=True) os.makedirs(f"deb/dists/wcbing/main/binary-{arch}/", exist_ok=True)
with open(f"deb/dists/wcbing/main/binary-{arch}/Packages", "+wb") as f: with open(f"deb/dists/wcbing/main/binary-{arch}/Packages", "+wb") as f:
for i in package_info[arch].values(): for i in packages[arch].values():
f.write(i) f.write(i["package"])
for i in package_info["all"].values(): for i in packages["all"].values():
f.write(i) f.write(i["package"])