feat(easypanel): update EasyPanel configuration and scripts, including improvements to password generation, tools syncing, and configuration management

Cesar Jhoanny Mendivil Rubio 2025-10-01 17:41:20 -07:00
parent 83724ddc26
commit 697b7cc288
22 changed files with 976 additions and 5 deletions


@@ -15,7 +15,7 @@ services:
     restart: unless-stopped
     environment:
       POSTGRES_USER: ${POSTGRES_USER:-libretime}
-      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-Jz/XxRUodVl2g0HE59DszTBJVY8Sdmv7}
+      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-libretime}
       POSTGRES_DB: ${POSTGRES_DB:-libretime}
     volumes:
       - postgres_data:/var/lib/postgresql/data
@@ -32,7 +32,7 @@ services:
     environment:
       RABBITMQ_DEFAULT_VHOST: ${RABBITMQ_DEFAULT_VHOST:-/libretime}
       RABBITMQ_DEFAULT_USER: ${RABBITMQ_DEFAULT_USER:-libretime}
-      RABBITMQ_DEFAULT_PASS: ${RABBITMQ_DEFAULT_PASS:-Bn321PQSRXanvmZlppuulVCB0ShN5Dz2}
+      RABBITMQ_DEFAULT_PASS: ${RABBITMQ_DEFAULT_PASS}
     healthcheck:
       test: ["CMD-SHELL", "rabbitmq-diagnostics check_port_connectivity"]
       interval: 30s
@@ -96,9 +96,9 @@ services:
     image: ghcr.io/libretime/icecast:2.4.4
     restart: unless-stopped
     environment:
-      ICECAST_SOURCE_PASSWORD: ${ICECAST_SOURCE_PASSWORD:-dna1g1GcaaHakSN6C9X7rcPRpIIc/jV2}
-      ICECAST_ADMIN_PASSWORD: ${ICECAST_ADMIN_PASSWORD:-BLoYLPlUXfmkxrsvGF7LP0TtVtuKNuzJ}
-      ICECAST_RELAY_PASSWORD: ${ICECAST_RELAY_PASSWORD:-jYzhEjwdiJlTk30QOYHum6UE61FHo+sd}
+      ICECAST_SOURCE_PASSWORD: ${ICECAST_SOURCE_PASSWORD:-hackme}
+      ICECAST_ADMIN_PASSWORD: ${ICECAST_ADMIN_PASSWORD:-hackme}
+      ICECAST_RELAY_PASSWORD: ${ICECAST_RELAY_PASSWORD:-hackme}
       ICECAST_ADMIN_USER: ${ICECAST_ADMIN_USER:-admin}
       ICECAST_HOSTNAME: ${ICECAST_HOSTNAME:-localhost}
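With these three hunks the credential defaults are no longer baked into the compose file; the real values are expected to come from the environment (or the EasyPanel UI). A minimal sketch of supplying them before bringing the stack up — the variable names come from the hunks above, and openssl is just one way to generate values:

# set strong credentials in the shell (or paste generated values into the EasyPanel environment UI)
export POSTGRES_PASSWORD="$(openssl rand -base64 24)"
export RABBITMQ_DEFAULT_PASS="$(openssl rand -base64 24)"
export ICECAST_SOURCE_PASSWORD="$(openssl rand -base64 24)"
export ICECAST_ADMIN_PASSWORD="$(openssl rand -base64 24)"
export ICECAST_RELAY_PASSWORD="$(openssl rand -base64 24)"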


@@ -0,0 +1,15 @@
all: lint test
include python.mk
PIP_INSTALL = \
requests \
types-requests
PYLINT_ARG = tools
MYPY_ARG = .
PYTEST_ARG = .
format: .format
lint: .format-check .pylint .mypy
test: .pytest
clean: .clean
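This Makefile only sets the per-directory variables and defers to the shared python.mk (its content appears later in this diff). Assuming it lands next to the other tools in easypanel/code/tools/, the usual entry points would be:

# create the virtualenv on first run, then format, lint and test the tools
make -C easypanel/code/tools format
make -C easypanel/code/tools lint test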


@@ -0,0 +1,3 @@
# Tools
This folder contains scripts/tools to manage the project.



@@ -0,0 +1,63 @@
#!/usr/bin/env bash
# Sync the docs folder with the libretime/website repository.
set -e
error() {
echo >&2 "error: $*"
exit 1
}
command -v git > /dev/null || error "git command not found!"
usage() {
cat >&2 <<- EOF
Usage : $0 <commit_range>
Positional arguments:
commit_range Commit range to scan for changes within the docs folder.
EOF
}
if [[ $# -lt 1 ]]; then
usage
exit 1
fi
commit_range="$1"
[[ -n "$GITHUB_REF_NAME" ]] || error "GITHUB_REF_NAME variable is not set!"
[[ -n "$GITHUB_REPOSITORY" ]] || error "GITHUB_REPOSITORY variable is not set!"
git config --global user.name "libretime-bot"
git config --global user.email "libretime-bot@users.noreply.github.com"
if [[ "$GITHUB_REF_NAME" == "main" ]]; then
dest="docs"
else
dest="versioned_docs/version-$GITHUB_REF_NAME"
fi
for commit in $(git rev-list --reverse --no-merges "$commit_range" -- docs); do
git checkout "$commit"
git show \
--quiet \
--format="%B%n${GITHUB_REPOSITORY}@%H" \
"$commit" \
> commit-message
rm -fR "website/$dest"
cp -r "docs" "website/$dest"
pushd website
git add "$dest"
git diff-index --quiet HEAD -- || git commit --file=../commit-message
popd
rm commit-message
done
pushd website
git push
popd
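The script expects a checkout of the website repository in ./website and is normally driven by the GITHUB_* variables that GitHub Actions provides. A hedged manual run (the script's file name is not shown in this diff, and the commit range is only an example) could look like:

# replay docs changes from a commit range into the website checkout
export GITHUB_REF_NAME=main
export GITHUB_REPOSITORY=libretime/libretime
git clone git@github.com:libretime/website.git website
./tools/sync-website.sh "origin/stable..origin/main"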


@@ -0,0 +1,108 @@
#!/usr/bin/env python3
# pylint: disable=invalid-name
import logging
from argparse import ArgumentParser
from os import environ
from subprocess import check_output
from typing import Any, Generator, List, Tuple
from requests import Session
logger = logging.getLogger("contributors")
REPOSITORY = "libretime/libretime"
EXCLUDED_CONTRIBUTORS = {
"dependabot[bot]",
"invalid-email-address",
"libretime-bot",
"renovate-bot",
"renovate[bot]",
"web-flow",
"weblate",
}
def extract_date_range(commit_range: str) -> Tuple[str, str]:
output = check_output(
["git", "log", "--reverse", "--format=%cI", commit_range], text=True
)
lines = output.splitlines()
return lines[0], lines[-1]
def gh_get_commits(
client: Session,
since: str,
until: str,
) -> Generator[dict[str, Any], None, None]:
per_page = 100
page = 1
while True:
logger.info("querying page %s", page)
with client.get(
f"https://api.github.com/repos/{REPOSITORY}/commits",
params={ # type: ignore[arg-type]
"per_page": per_page,
"page": page,
"since": since,
"until": until,
},
timeout=5,
) as resp:
resp.raise_for_status()
commits: List[dict] = resp.json()
yield from commits
if len(commits) < per_page:
break
page += 1
def main(commit_range: str) -> int:
client = Session()
if "GITHUB_TOKEN" in environ:
logger.info("loading GITHUB_TOKEN")
github_token = environ["GITHUB_TOKEN"]
client.headers.update({"Authorization": f"token {github_token}"})
contributors = set()
since, until = extract_date_range(commit_range)
logger.info("%s: %s => %s", commit_range, since, until)
for commit in gh_get_commits(client, since, until):
if commit["author"] is None or commit["committer"] is None:
continue
try:
author: str = commit["author"]["login"]
committer: str = commit["committer"]["login"]
contributors.add(author.casefold())
contributors.add(committer.casefold())
except (KeyError, TypeError) as exception:
logger.error("%s: %s", exception, commit)
contributors -= EXCLUDED_CONTRIBUTORS
print()
for contributor in sorted(contributors):
print(f"- @{contributor}")
print()
return 0
if __name__ == "__main__":
logging.basicConfig(
level=logging.INFO,
format="%(levelname)s:\t%(message)s",
)
parser = ArgumentParser()
parser.add_argument("commit_range")
args = parser.parse_args()
raise SystemExit(main(commit_range=args.commit_range))
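The script takes a git commit range, resolves it to a date window, and pages through the GitHub commits API. A hedged invocation (script path assumed, the token only raises the API rate limit):

# print a markdown list of contributors between two refs
export GITHUB_TOKEN=...        # optional; read if present
./tools/contributors.py "4.2.0..HEAD"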


@@ -0,0 +1,39 @@
#!/bin/sh
# Test script: generates a config at ./tmp_config/config.yml so its contents can be validated
set -eu
CONFIG_DIR=./tmp_config
CONFIG_PATH="$CONFIG_DIR/config.yml"
mkdir -p "$CONFIG_DIR"
cat > "$CONFIG_PATH" <<EOF
general:
public_url: "${LIBRETIME_GENERAL_PUBLIC_URL:-http://localhost:8080}"
api_key: "${LIBRETIME_API_KEY:-}"
secret_key: "${LIBRETIME_SECRET_KEY:-}"
database:
host: ${POSTGRES_HOST:-postgres}
port: ${POSTGRES_PORT:-5432}
name: ${POSTGRES_DB:-libretime}
user: ${POSTGRES_USER:-libretime}
password: "${POSTGRES_PASSWORD:-}"
rabbitmq:
host: ${RABBITMQ_HOST:-rabbitmq}
port: ${RABBITMQ_PORT:-5672}
vhost: ${RABBITMQ_DEFAULT_VHOST:-/libretime}
user: ${RABBITMQ_DEFAULT_USER:-libretime}
password: "${RABBITMQ_DEFAULT_PASS:-}"
icecast:
source_password: "${ICECAST_SOURCE_PASSWORD:-changeme}"
admin_password: "${ICECAST_ADMIN_PASSWORD:-changeme}"
relay_password: "${ICECAST_RELAY_PASSWORD:-changeme}"
admin_user: "${ICECAST_ADMIN_USER:-admin}"
hostname: "${ICECAST_HOSTNAME:-localhost}"
EOF
echo "wrote $CONFIG_PATH"
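Because the heredoc is unquoted, any exported variable overrides the empty or placeholder fallbacks. A quick local check might look like this (the script's file name is not shown in the diff and is assumed here):

# render a throwaway config and inspect the database block
POSTGRES_PASSWORD='example-db-pass' RABBITMQ_DEFAULT_PASS='example-mq-pass' \
  sh tools/test-generate-config.sh
grep -A5 '^database:' tmp_config/config.yml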


@@ -0,0 +1,52 @@
#!/bin/sh
# Config generator for EasyPanel
# Reads environment variables and writes /config/config.yml atomically
set -eu
CONFIG_PATH=/config/config.yml
TMP_PATH=/config/config.yml.tmp
# Generate using an unquoted heredoc so variables are expanded
cat > "$TMP_PATH" <<EOF
general:
public_url: "${LIBRETIME_GENERAL_PUBLIC_URL:-http://localhost:8080}"
api_key: "${LIBRETIME_API_KEY:-}"
secret_key: "${LIBRETIME_SECRET_KEY:-}"
database:
host: "${POSTGRES_HOST:-postgres}"
port: ${POSTGRES_PORT:-5432}
name: "${POSTGRES_DB:-libretime}"
user: "${POSTGRES_USER:-libretime}"
password: "${POSTGRES_PASSWORD:-}"
rabbitmq:
host: "${RABBITMQ_HOST:-rabbitmq}"
port: ${RABBITMQ_PORT:-5672}
vhost: "${RABBITMQ_DEFAULT_VHOST:-/libretime}"
user: "${RABBITMQ_DEFAULT_USER:-libretime}"
password: "${RABBITMQ_DEFAULT_PASS:-}"
icecast:
source_password: "${ICECAST_SOURCE_PASSWORD:-changeme}"
admin_password: "${ICECAST_ADMIN_PASSWORD:-changeme}"
relay_password: "${ICECAST_RELAY_PASSWORD:-changeme}"
admin_user: "${ICECAST_ADMIN_USER:-admin}"
hostname: "${ICECAST_HOSTNAME:-localhost}"
EOF
# Minimal validation: the file must contain the 'general:' key
if ! grep -q '^general:' "$TMP_PATH"; then
echo "ERROR: config generation failed (missing 'general:')"
rm -f "$TMP_PATH"
exit 1
fi
# Move into place atomically
mv "$TMP_PATH" "$CONFIG_PATH"
echo "wrote $CONFIG_PATH"
# Keep the container running so dependent services can check its health
tail -f /dev/null
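Because the script ends with tail -f /dev/null, the generator stays up as a long-lived helper container. A dependent service can gate its own startup on the rendered file, for example with a check along these lines (a sketch, not part of this commit):

# wait until the generator has atomically moved config.yml into place
until test -s /config/config.yml; do
  echo "waiting for /config/config.yml ..."
  sleep 2
done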


@@ -0,0 +1,56 @@
#!/usr/bin/env python3
# Extract the dependencies from the setup.py files
# and save the result to requirements.txt.
#
# You can filter any extra require by adding the name as argument.
#
# Examples:
# tools/extract_requirements.py
# tools/extract_requirements.py dev
import ast
from glob import glob
from pathlib import Path
from sys import argv
class RemoveJoinedStr(ast.NodeTransformer):
def visit_JoinedStr(self, _node): # pylint: disable=invalid-name
pass
for setup in glob("*/setup.py"):
setup_path = Path(setup)
requirements_path = setup_path.parent / "requirements.txt"
lines = [
"# Please do not edit this file, edit the setup.py file!",
"# This file is auto-generated by tools/extract_requirements.py.",
]
requires = []
for node in ast.walk(ast.parse(setup_path.read_text(encoding="utf-8"))):
if (
isinstance(node, ast.Expr)
and isinstance(node.value, ast.Call)
and isinstance(node.value.func, ast.Name)
and node.value.func.id == "setup"
):
for keyword in node.value.keywords:
if keyword.arg == "install_requires":
requires.extend(ast.literal_eval(keyword.value))
if keyword.arg == "extras_require":
extras = ast.literal_eval(RemoveJoinedStr().visit(keyword.value))
for key, values in extras.items():
if key in argv:
continue
requires.extend(values)
lines.extend(sorted(requires))
requirements_path.write_text("\n".join(lines) + "\n", encoding="utf-8")


@@ -0,0 +1,36 @@
#!/usr/bin/env bash
set -eu
error() {
echo >&2 "error: $*"
exit 1
}
command -v curl > /dev/null || error "curl command not found!"
# Run concurrent curls which download from url to /dev/null.
url="$1"
# max concurrent calls
max=1000
# call duration (in seconds)
duration=100
# number of calls to start in batch
batch=10
# time to wait before starting a new batch of calls (in seconds)
delay=1
count=0
while [[ "$count" -le "$max" ]]; do
echo "starting $batch new calls ($count)"
for ((i = 1; i <= batch; i++)); do
curl -o /dev/null -m "$duration" -s "$url" &
done
count=$((count + batch))
sleep "$delay"
done
echo "waiting for calls to finish"
wait
echo "done"
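Only the URL is taken from the command line; concurrency (1000), per-call duration (100 s), batch size (10) and batch delay (1 s) are fixed in the script. Example run (URL is a placeholder, script name assumed):

# hammer a stream endpoint with batches of 10 concurrent downloads
./tools/stress-test.sh "http://localhost:8000/main.ogg"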


@@ -0,0 +1,20 @@
#!/usr/bin/env bash
set -u
error() {
echo >&2 "error: $*"
exit 1
}
migrations="api/libretime_api/legacy/migrations"
version_file="$migrations/__init__.py"
latest_migration="$(find "$migrations" -name '[0-9][0-9][0-9][0-9]_*.py' | sort | tail -n 1)"
latest_migration_version="$(basename "$latest_migration" | cut -d '_' -f 1)"
latest_migration_version="$((10#$latest_migration_version))" # Strip leading zeros
sed \
-i "s#^LEGACY_SCHEMA_VERSION =.*#LEGACY_SCHEMA_VERSION = \"$latest_migration_version\"#" \
"$version_file"

easypanel/code/tools/packages.py (new executable file, 113 lines)

@@ -0,0 +1,113 @@
#!/usr/bin/env python3
from argparse import ArgumentParser
from configparser import ConfigParser
from os import PathLike
from pathlib import Path
from typing import Iterator, List, Optional, Set
DEFAULT_PACKAGES_FILENAME = "packages.ini"
FORMATS = ("list", "line")
DISTRIBUTIONS = ("focal", "bullseye", "jammy", "bookworm")
SETTINGS_SECTION = "=settings"
DEVELOPMENT_SECTION = "=development"
def load_packages(
raw: str,
distribution: str,
development: bool = False,
exclude: Optional[List[str]] = None,
) -> Set[str]:
if distribution not in DISTRIBUTIONS:
raise ValueError(f"Invalid distribution '{distribution}'")
manager = ConfigParser(default_section=SETTINGS_SECTION)
manager.read_string(raw)
packages = set()
exclude = set(exclude or [])
for section, entries in manager.items():
if not development and section == DEVELOPMENT_SECTION or section in exclude:
continue
for package, distributions in entries.items():
if distribution in distributions.split(", "):
packages.add(package)
return packages
def list_packages_files(
paths: List[PathLike],
) -> Iterator[Path]:
for path_like in paths:
path = Path(path_like)
if path.is_dir():
path = path / DEFAULT_PACKAGES_FILENAME
if not path.is_file():
raise ValueError(f"{path} is not a file!")
yield path
def list_packages(
paths: List[PathLike],
distribution: str,
development: bool = False,
exclude: Optional[List[str]] = None,
) -> Set[str]:
packages = set()
for package_file in list_packages_files(paths):
raw = package_file.read_text()
packages.update(load_packages(raw, distribution, development, exclude))
return set(sorted(packages))
def run():
parser = ArgumentParser()
parser.add_argument(
"-f",
"--format",
choices=FORMATS,
help="print packages list in a specific format.",
default="list",
)
parser.add_argument(
"-d",
"--dev",
help="include development packages.",
action="store_true",
)
parser.add_argument(
"-e",
"--exclude",
help="exclude packages sections.",
action="append",
)
parser.add_argument(
"distribution",
choices=DISTRIBUTIONS,
help="list packages for the given distribution.",
)
parser.add_argument(
"path",
nargs="+",
help="list packages from given files or directories.",
)
args = parser.parse_args()
packages = list_packages(args.path, args.distribution, args.dev, args.exclude)
if args.format == "list":
print("\n".join(packages))
else:
print(" ".join(packages))
if __name__ == "__main__":
run()
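Usage follows the argparse definition above. For example (the api and playout directories and their packages.ini files are assumptions about the repository layout; see the test just below for the file format):

# newline-separated runtime packages for Debian bookworm
easypanel/code/tools/packages.py bookworm api playout
# space-separated, including the =development section
easypanel/code/tools/packages.py --format line --dev bookworm api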


@@ -0,0 +1,37 @@
from pathlib import Path
from tools.packages import list_packages, load_packages
PACKAGE_INI = """
[common]
postgresql = focal, jammy
# Some comment
curl = bullseye, jammy
[legacy]
some-package = focal, bullseye
[=development]
ffmpeg = focal, bullseye, jammy
"""
result_jammy = {"curl", "postgresql"}
result_bullseye = {"some-package", "curl", "ffmpeg"}
result_focal = {"postgresql", "some-package", "ffmpeg"}
result_exclude = {"postgresql", "ffmpeg"}
def test_load_packages():
assert load_packages(PACKAGE_INI, "jammy", False) == result_jammy
assert load_packages(PACKAGE_INI, "bullseye", True) == result_bullseye
assert load_packages(PACKAGE_INI, "focal", True) == result_focal
assert load_packages(PACKAGE_INI, "focal", True, ["legacy"]) == result_exclude
def test_list_packages(tmp_path: Path) -> None:
package_file = tmp_path / "packages.ini"
package_file.write_text(PACKAGE_INI)
assert list_packages([tmp_path, package_file], "jammy", False) == result_jammy
assert list_packages([tmp_path, package_file], "bullseye", True) == result_bullseye
assert list_packages([tmp_path, package_file], "focal", True) == result_focal


@@ -0,0 +1,19 @@
[tool.isort]
profile = "black"
combine_as_imports = true
[tool.pylint.messages_control]
extension-pkg-whitelist = "pydantic"
disable = [
"missing-class-docstring",
"missing-function-docstring",
"missing-module-docstring",
]
[tool.mypy]
allow_redefinition = true
disallow_incomplete_defs = true
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"


@@ -0,0 +1,9 @@
bandit>=1.7.4,<2
black>=23.1.0,<26
flake8>=6.0.0,<8
isort>=5.12.0,<7
mypy>=1.6.0,<2
pylint>=2.16.1,<4
pytest-cov>=4.0.0,<6
pytest-xdist>=3.1.0,<4
pytest>=7.2.1,<9


@@ -0,0 +1,75 @@
.DEFAULT_GOAL := install
SHELL := bash
CPU_CORES := $(shell N=$$(nproc); echo $$(( $$N > 4 ? 4 : $$N )))
# PIP_INSTALL = --editable .
# PYLINT_ARG =
# MYPY_ARG =
# BANDIT_ARG =
# PYTEST_ARG =
VENV = .venv
install: $(VENV)
$(VENV):
python3 -m venv $(VENV)
$(VENV)/bin/pip install --upgrade "pip<24.1" setuptools wheel
$(VENV)/bin/pip install --prefer-binary \
--requirement ../tools/python-requirements.txt \
$(PIP_INSTALL)
.PHONY: .format
.format: $(VENV)
$(VENV)/bin/black .
$(VENV)/bin/isort .
.PHONY: .format-check
.format-check: $(VENV)
$(VENV)/bin/black . --check
$(VENV)/bin/isort . --check
.PHONY: .pylint
.pylint: $(VENV)
$(VENV)/bin/pylint --jobs=$(CPU_CORES) --output-format=colorized --recursive=true $(PYLINT_ARG)
.PHONY: .mypy
.mypy: $(VENV)
$(VENV)/bin/mypy $(MYPY_ARG)
.PHONY: .bandit
.bandit: $(VENV)
$(VENV)/bin/bandit -r $(BANDIT_ARG)
.PHONY: .pytest
.pytest: $(VENV)
$(VENV)/bin/pytest \
--numprocesses=$(CPU_CORES) \
--color=yes
.PHONY: .coverage
.coverage: $(VENV)
$(VENV)/bin/pytest \
--numprocesses=$(CPU_CORES) \
--cov \
--cov-config=pyproject.toml \
--cov-report=term \
--cov-report=xml
.PHONY: .clean
.clean:
rm -Rf $(VENV)
DISTRO ?= bullseye
DOCKER_RUN = docker run -it --rm \
--user $$(id -u):$$(id -g) \
--env HOME=/src/.docker/$(DISTRO) \
--volume $$(pwd)/..:/src \
--workdir /src/$(APP) \
ghcr.io/libretime/libretime-dev:$(DISTRO)
docker-dev:
$(MAKE) clean
$(DOCKER_RUN) bash
docker-test:
$(MAKE) clean
$(DOCKER_RUN) make test


@@ -0,0 +1,102 @@
#!/usr/bin/env sh
# restore-config.sh
# Lists and restores backups created by start.sh (config.local.yml.bak.YYYYMMDDHHMMSS)
set -e
WORKDIR="$(pwd)"
BACKUP_GLOB="$WORKDIR/config.local.yml.bak.*"
DEST="$WORKDIR/config.local.yml"
usage() {
cat <<EOF
Usage: $0 [--list] [--latest] [--restore <file>] [--yes]
Options:
--list List available backup files
--latest Show the latest backup file (implies --list)
--restore FILE Restore the specified backup into config.local.yml
--yes Skip confirmation when restoring
Examples:
$0 --list
$0 --latest
$0 --restore ./config.local.yml.bak.20251001102251
$0 --restore latest --yes
EOF
}
list_backups() {
ls -1 $BACKUP_GLOB 2>/dev/null | sort || true
}
latest_backup() {
# sort by name (timestamp suffix) and pick last
ls -1 $BACKUP_GLOB 2>/dev/null | sort | tail -n 1 || true
}
if [ "$#" -eq 0 ]; then
usage
exit 0
fi
FORCE=0
MODE=""
TARGET=""
while [ "$#" -gt 0 ]; do
case "$1" in
--list)
MODE=list; shift ;;
--latest)
MODE=latest; shift ;;
--restore)
MODE=restore; TARGET="$2"; shift 2 ;;
--yes)
FORCE=1; shift ;;
-h|--help)
usage; exit 0 ;;
*)
echo "Unknown arg: $1" >&2; usage; exit 1 ;;
esac
done
case "$MODE" in
list)
echo "Backups:";
list_backups;
exit 0
;;
latest)
echo "Latest backup:";
latest_backup;
exit 0
;;
restore)
if [ "$TARGET" = "latest" ] || [ -z "$TARGET" ]; then
TARGET=$(latest_backup)
fi
if [ -z "$TARGET" ] || [ ! -f "$TARGET" ]; then
echo "No backup found to restore: $TARGET" >&2
exit 1
fi
echo "About to restore backup: $TARGET -> $DEST"
if [ "$FORCE" -ne 1 ]; then
printf "Continue and overwrite %s? [y/N]: " "$DEST"
read ans || true
case "$ans" in
y|Y|yes|YES)
;;
*)
echo "Aborted."; exit 1 ;;
esac
fi
cp "$TARGET" "$DEST"
echo "Restored $TARGET -> $DEST"
exit 0
;;
*)
usage; exit 1 ;;
esac


@@ -0,0 +1,84 @@
#!/usr/bin/env python3
# pylint: disable=invalid-name
import subprocess
from argparse import (
ArgumentDefaultsHelpFormatter,
ArgumentParser,
RawDescriptionHelpFormatter,
)
from contextlib import suppress
class ArgumentParserFormatter(
RawDescriptionHelpFormatter,
ArgumentDefaultsHelpFormatter,
):
pass
def run():
parser = ArgumentParser(
description="Send a sine wave sound to an icecast mount or liquidsoap input harbor.",
formatter_class=lambda prog: ArgumentParserFormatter(
prog, max_help_position=60
),
)
parser.add_argument(
"--url",
metavar="<url>",
help="""Stream <url> (<user>:<password>@<host>:<port>/<mount>) to test. If
defined any other option will be ignored.""",
)
parser.add_argument(
"--host",
metavar="<host>",
help="Stream <host> used to build the stream url.",
default="localhost",
)
parser.add_argument(
"--port",
metavar="<port>",
help="Stream <port> used to build the stream url.",
default=8001,
)
parser.add_argument(
"--mount",
metavar="<mount>",
help="Stream <mount> used to build the stream url.",
default="main",
)
parser.add_argument(
"--user",
metavar="<user>",
help="Stream <user> used to build the stream url.",
default="source",
)
parser.add_argument(
"--password",
metavar="<password>",
help="Stream <password> used to build the stream url.",
default="hackme",
)
args = parser.parse_args()
stream_url = args.url
if stream_url is None:
stream_url = f"icecast://{args.user}:{args.password}@{args.host}:{args.port}/{args.mount}"
cmd = ["ffmpeg", "-hide_banner"]
cmd.extend(["-re"])
cmd.extend(["-f", "lavfi", "-i", "sine=frequency=1000"])
cmd.extend(["-ar", "48000", "-ac", "2"])
cmd.extend(["-f", "ogg"])
cmd.extend(["-content_type", "application/ogg"])
cmd.extend([stream_url])
print(" ".join(cmd))
with suppress(subprocess.CalledProcessError, KeyboardInterrupt):
subprocess.run(cmd, check=True, text=True)
if __name__ == "__main__":
run()
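Without --url the script builds an icecast:// URL from the individual options and pipes a 1 kHz sine through ffmpeg. For instance (the values mirror the option defaults; the script name is assumed):

# stream a test tone to a local Liquidsoap harbor / Icecast mount
./tools/stream-test.py --host localhost --port 8001 --mount main --user source --password hackme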


@@ -0,0 +1,63 @@
#!/usr/bin/env bash
# Keep configuration files in sync with installer/config.yml.
set -u
error() {
echo >&2 "error: $*"
exit 1
}
command -v ed > /dev/null || error "ed command not found!"
CONFIG_ORIG_FILEPATH="installer/config.yml"
# set_config <value> <key...>
set_config() {
value="${1}" && shift
# Build sed query
query="/^${1}:/\n"
while [[ $# -gt 1 ]]; do
shift
query+="/${1}:/\n"
done
query+="s|\(${1}:\).*|\1 ${value}|\n"
query+="wq"
echo -e "$query" | ed --quiet "$CONFIG_FILEPATH" > /dev/null
}
set_docker_config() {
set_config "postgres" database host
set_config "rabbitmq" rabbitmq host
set_config "liquidsoap" playout liquidsoap_host
set_config "0.0.0.0" liquidsoap server_listen_address
set_config "icecast" stream outputs .default_icecast_output host
}
set_docker_config_template_vars() {
set_config "\${POSTGRES_PASSWORD}" database password
set_config "\${RABBITMQ_DEFAULT_PASS}" rabbitmq password
set_config "\${ICECAST_SOURCE_PASSWORD}" stream outputs .default_icecast_output source_password
set_config "\${ICECAST_ADMIN_PASSWORD}" stream outputs .default_icecast_output admin_password
}
CONFIG_FILEPATH="docker/config.yml"
cp "$CONFIG_ORIG_FILEPATH" "$CONFIG_FILEPATH"
set_docker_config
CONFIG_FILEPATH="docker/config.template.yml"
cp "$CONFIG_ORIG_FILEPATH" "$CONFIG_FILEPATH"
set_docker_config
set_docker_config_template_vars
CONFIG_FILEPATH="docker/example/config.yml"
cp "$CONFIG_ORIG_FILEPATH" "$CONFIG_FILEPATH"
set_docker_config
set_config "http://localhost:8080" general public_url
set_config "some_secret_api_key" general api_key
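Each set_config call turns its key path into an ed script: one address per key level, then a substitution on the final key. The first call in set_docker_config, for example, is equivalent to the following (file path and values taken from the function above):

# what set_config "postgres" database host sends to ed for docker/config.yml
echo -e '/^database:/\n/host:/\ns|\\(host:\\).*|\\1 postgres|\nwq' | ed --quiet docker/config.yml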

easypanel/code/tools/version.sh (new executable file, 26 lines)

@@ -0,0 +1,26 @@
#!/usr/bin/env bash
set -u
error() {
echo >&2 "error: $*"
exit 1
}
command -v git > /dev/null || error "git command not found!"
command -v tee > /dev/null || error "tee command not found!"
typeset -r version_file="VERSION"
if [[ "$(git rev-parse --is-inside-work-tree 2> /dev/null)" == "true" ]]; then
tag=$(git tag --points-at HEAD | tee "$version_file" || error "could not extract tag")
if [[ -z "$tag" ]]; then
ref="${GITHUB_REF_NAME:-$(git rev-parse --abbrev-ref HEAD || error "could not extract ref")}"
sha="${GITHUB_SHA:-$(git rev-parse HEAD || error "could not extract commit sha")}"
echo "$ref-$sha" > "$version_file"
fi
else
if [[ ! -f "$version_file" ]]; then
echo "could not detect version" > VERSION
fi
fi


@@ -79,6 +79,32 @@ async function main() {
const sanitized = removeContainerNamesAndPorts(raw);
fs.writeFileSync(DEST, sanitized, 'utf8');
// Copy tools/ into code/ so relative mounts like ./tools work in the generated compose
const toolsSrc = path.resolve(__dirname, '../tools');
const toolsDest = path.resolve(DEST_DIR, 'tools');
try {
if (fs.existsSync(toolsSrc)) {
// remove existing dest if any
if (fs.existsSync(toolsDest)) {
fs.rmSync(toolsDest, { recursive: true, force: true });
}
// copy recursively
const { spawnSync } = require('child_process');
const res = spawnSync('cp', ['-a', toolsSrc, toolsDest]);
if (res.status !== 0) {
console.warn('warning: failed to copy tools directory to code/:', res.stderr && res.stderr.toString());
}
}
} catch (err) {
console.warn('warning copying tools:', err && err.message);
}
// Ensure .sh scripts inside code/tools are executable
try {
const { spawnSync } = require('child_process');
spawnSync('find', [path.join(DEST_DIR, 'tools'), '-type', 'f', '-iname', '*.sh', '-exec', 'chmod', '0755', '{}', ';']);
} catch (e) {
// ignore
}
console.log('Prepared', DEST);
}


@@ -63,6 +63,17 @@ fi
# Copy
cp "$SRC" "$DEST"
# Copy the tools folder into the generated directory so that the './tools' bind mounts work
if [ -d "$BASE_DIR/../tools" ]; then
rm -rf "$DEST_DIR/tools"
mkdir -p "$DEST_DIR"
cp -a "$BASE_DIR/../tools" "$DEST_DIR/"
fi
if [ -d "$DEST_DIR/tools" ]; then
# Ensure .sh scripts inside code/tools are executable
find "$DEST_DIR/tools" -type f -iname '*.sh' -exec chmod 0755 {} \;
fi
# Remove the container_name and ports keys (we simply drop the lines that contain 'container_name:' or 'ports:')
# This is similar to what many EasyPanel examples do.
if command -v perl >/dev/null 2>&1; then
@@ -79,3 +90,17 @@ else
fi
echo "Prepared $DEST for EasyPanel. Review the environment variables in the README and set them in the EasyPanel UI."
# Summary: list the copied scripts and their permissions
if [ -d "$DEST_DIR/tools" ]; then
echo
echo "Summary - files in $DEST_DIR/tools:"
ls -l "$DEST_DIR/tools" | sed -n '1,200p'
echo
echo "Permissions of .sh scripts (they must be executable):"
find "$DEST_DIR/tools" -type f -iname '*.sh' -exec ls -l {} \; || true
else
echo "Note: $DEST_DIR/tools was not found"
fi
echo "update.sh completed successfully. If EasyPanel still shows errors, check the logs of the 'config-generator' container."
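If EasyPanel still reports errors after update.sh, the usual checks are the generator's logs and the rendered config. A sketch, assuming the stack is driven by docker compose and the service is named config-generator as the message above suggests:

# inspect the helper container and the file it generated
docker compose logs config-generator
docker compose exec config-generator cat /config/config.yml
# confirm the copied scripts kept the executable bit set by update.sh
find easypanel/code/tools -type f -name '*.sh' -exec ls -l {} \;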