Mirror of https://gitlab.com/upRootNutrition/dotfiles.git (synced 2025-12-14 02:20:53 -06:00)

Commit 4225970826 (parent 8cd193ec49)
test: setting up nas structure

747 changed files with 2938 additions and 4347 deletions
modules/nixos/homelab/orphans/comfyui/default.nix (new executable file, 164 additions)
@@ -0,0 +1,164 @@
{
|
||||
flake,
|
||||
config,
|
||||
...
|
||||
}:
|
||||
let
|
||||
inherit (flake.config.services) instances;
|
||||
serviceCfg = instances.comfyui;
|
||||
localhost = instances.web.localhost.address1;
|
||||
host = serviceCfg.domains.url0;
|
||||
dns = instances.web.dns.provider0;
|
||||
dnsPath = "dns/${dns}";
|
||||
|
||||
in
|
||||
{
|
||||
virtualisation.docker = {
|
||||
enable = true;
|
||||
enableNvidia = true;
|
||||
autoPrune = {
|
||||
enable = true;
|
||||
dates = "weekly";
|
||||
};
|
||||
};
|
||||
|
||||
virtualisation.oci-containers = {
|
||||
backend = "docker";
|
||||
containers.comfyui = {
|
||||
image = "yanwk/comfyui-boot:cu126-slim";
|
||||
autoStart = true;
|
||||
|
||||
ports = [
|
||||
"${localhost}:${toString serviceCfg.ports.port0}:8188"
|
||||
];
|
||||
|
||||
volumes = [
|
||||
"${serviceCfg.varPaths.path0}:/root"
|
||||
"${serviceCfg.varPaths.path0}/models:/root/models"
|
||||
"${serviceCfg.varPaths.path0}/custom_nodes:/root/custom_nodes"
|
||||
"${serviceCfg.varPaths.path0}/output:/root/output"
|
||||
"${serviceCfg.varPaths.path0}/input:/root/input"
|
||||
"${serviceCfg.varPaths.path0}/user:/root/user"
|
||||
];
|
||||
|
||||
environment = {
|
||||
CLI_ARGS = "--listen 0.0.0.0 --port 8188 --preview-method auto --dont-print-server";
|
||||
NVIDIA_VISIBLE_DEVICES = "0";
|
||||
NVIDIA_DRIVER_CAPABILITIES = "compute,utility,graphics";
|
||||
};
|
||||
|
||||
extraOptions = [
|
||||
"--runtime=nvidia"
|
||||
"--gpus=device=0"
|
||||
# Memory limits to prevent OOM
|
||||
"--memory=32g"
|
||||
"--memory-swap=32g"
|
||||
"--shm-size=16g"
|
||||
# Security
|
||||
"--security-opt=no-new-privileges:true"
|
||||
# Network
|
||||
"--network=bridge"
|
||||
# Health check
|
||||
"--health-cmd=curl -f http://localhost:8188/ || exit 1"
|
||||
"--health-interval=30s"
|
||||
"--health-timeout=10s"
|
||||
"--health-retries=3"
|
||||
];
|
||||
};
|
||||
};
|
||||
|
||||
services.caddy = {
|
||||
enable = true;
|
||||
virtualHosts = {
|
||||
"${host}" = {
|
||||
extraConfig = ''
|
||||
basic_auth {
|
||||
{$CADDY_AUTH_USER} {$CADDY_AUTH_PASSWORD_HASH}
|
||||
}
|
||||
|
||||
# Main reverse proxy with WebSocket support
|
||||
reverse_proxy ${localhost}:${toString serviceCfg.ports.port0} {
|
||||
header_up Host {host}
|
||||
header_up X-Real-IP {remote}
|
||||
header_up X-Forwarded-For {remote}
|
||||
header_up X-Forwarded-Proto {scheme}
|
||||
|
||||
# WebSocket support - critical for ComfyUI real-time updates
|
||||
header_up Connection {>Connection}
|
||||
header_up Upgrade {>Upgrade}
|
||||
|
||||
# Longer timeouts for generation tasks
|
||||
transport http {
|
||||
read_timeout 300s
|
||||
write_timeout 300s
|
||||
}
|
||||
}
|
||||
|
||||
tls ${serviceCfg.ssl.cert} ${serviceCfg.ssl.key}
|
||||
|
||||
# Security headers
|
||||
header {
|
||||
Strict-Transport-Security "max-age=31536000; includeSubDomains; preload"
|
||||
X-Frame-Options "DENY"
|
||||
X-Content-Type-Options "nosniff"
|
||||
X-XSS-Protection "1; mode=block"
|
||||
Referrer-Policy "strict-origin-when-cross-origin"
|
||||
-Server
|
||||
}
|
||||
|
||||
# Logging
|
||||
log {
|
||||
output file /var/log/caddy/comfyui-access.log
|
||||
format json
|
||||
}
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
systemd.tmpfiles.rules = [
|
||||
"d ${serviceCfg.varPaths.path0} 755 root root -"
|
||||
"d ${serviceCfg.secretPaths.path0}/caddy 755 caddy caddy -"
|
||||
"d /var/log/caddy 755 caddy caddy -"
|
||||
];
|
||||
|
||||
systemd.services.caddy = {
|
||||
serviceConfig = {
|
||||
EnvironmentFile = config.sops.secrets."caddy/${serviceCfg.name}-auth".path;
|
||||
};
|
||||
};
|
||||
|
||||
boot.kernel.sysctl = {
|
||||
"kernel.shmmax" = 68719476736;
|
||||
"kernel.shmall" = 4194304;
|
||||
"vm.swappiness" = 1;
|
||||
"vm.dirty_ratio" = 15;
|
||||
"vm.dirty_background_ratio" = 5;
|
||||
};
|
||||
|
||||
systemd.services.docker-comfyui = {
|
||||
after = [
|
||||
"network-online.target"
|
||||
"docker.service"
|
||||
];
|
||||
wants = [ "network-online.target" ];
|
||||
requires = [ "docker.service" ];
|
||||
};
|
||||
|
||||
sops.secrets = {
|
||||
"caddy/${serviceCfg.name}-auth" = {
|
||||
owner = "caddy";
|
||||
group = "caddy";
|
||||
mode = "0400";
|
||||
};
|
||||
};
|
||||
|
||||
users.users.caddy.extraGroups = [ "acme" ];
|
||||
|
||||
security.acme.certs."${host}" = {
|
||||
dnsProvider = dns;
|
||||
environmentFile = config.sops.secrets.${dnsPath}.path;
|
||||
group = "caddy";
|
||||
};
|
||||
|
||||
}
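The six volume entries above all hang off serviceCfg.varPaths.path0. A possible tidy-up, sketched with map under the same path layout (not part of the committed module):

  volumes =
    [ "${serviceCfg.varPaths.path0}:/root" ]
    ++ map (dir: "${serviceCfg.varPaths.path0}/${dir}:/root/${dir}") [
      "models"
      "custom_nodes"
      "output"
      "input"
      "user"
    ];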
modules/nixos/homelab/orphans/default.nix (new executable file, 11 additions)
@@ -0,0 +1,11 @@
let
  importList =
    let
      content = builtins.readDir ./.;
      dirContent = builtins.filter (n: content.${n} == "directory") (builtins.attrNames content);
    in
    map (name: ./. + "/${name}") dirContent;
in
{
  imports = importList;
}
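importList walks the directory this file lives in and imports every subdirectory. With the directories added in this commit it evaluates to something like the following (a sketch, assuming only these subdirectories sit next to the file):

  imports = [
    ./comfyui
    ./defenseio
    ./defenseioGpu
    ./glance
    ./midnight
    ./ollama
    ./peertube
    ./searx
  ];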
modules/nixos/homelab/orphans/defenseio/default.nix (new executable file, 189 additions)
@@ -0,0 +1,189 @@
{
|
||||
flake,
|
||||
pkgs,
|
||||
config,
|
||||
...
|
||||
}:
|
||||
let
|
||||
inherit (flake.config.people) user0;
|
||||
inherit (flake.config.machines) devices;
|
||||
|
||||
mars = devices.mars.name;
|
||||
ceres = devices.ceres.name;
|
||||
eris = devices.eris.name;
|
||||
deimos = devices.deimos.name;
|
||||
phobos = devices.phobos.name;
|
||||
|
||||
hostname = config.networking.hostName;
|
||||
|
||||
deviceLogic =
|
||||
var0: var1: var2: var3: var4:
|
||||
if hostname == ceres then
|
||||
var0
|
||||
else if hostname == eris then
|
||||
var1
|
||||
else if hostname == mars then
|
||||
var2
|
||||
else if hostname == deimos then
|
||||
var3
|
||||
else if hostname == phobos then
|
||||
var4
|
||||
else
|
||||
var0;
|
||||
|
||||
macOctet = deviceLogic "57" "58" "59" "60" "61";
|
||||
in
|
||||
|
||||
{
|
||||
microvm = {
|
||||
vms = {
|
||||
defenseio = {
|
||||
autostart = true;
|
||||
config =
|
||||
let
|
||||
ceresCpu = 28;
|
||||
erisCpu = 5;
|
||||
marsCpu = 18;
|
||||
deimosCpu = 4;
|
||||
phobosCpu = 4;
|
||||
|
||||
macAddress = "02:00:00:00:00:${macOctet}";
|
||||
workers = deviceLogic ceresCpu erisCpu marsCpu deimosCpu phobosCpu;
|
||||
in
|
||||
{
|
||||
environment.systemPackages = [
|
||||
pkgs.git
|
||||
pkgs.ncurses
|
||||
pkgs.python313
|
||||
];
|
||||
|
||||
microvm = {
|
||||
forwardPorts = [
|
||||
{
|
||||
from = "host";
|
||||
host.port = 2058;
|
||||
guest.port = 22;
|
||||
}
|
||||
];
|
||||
hypervisor = "qemu";
|
||||
interfaces = [
|
||||
{
|
||||
type = "user";
|
||||
id = "uservm-dfo";
|
||||
mac = macAddress;
|
||||
}
|
||||
];
|
||||
mem =
|
||||
let
|
||||
num = num: (num * 1024);
|
||||
ceresRam = num 45;
|
||||
erisRam = num 7;
|
||||
marsRam = num 30;
|
||||
deimosRam = num 7;
|
||||
phobosRam = num 7;
|
||||
in
|
||||
deviceLogic ceresRam erisRam marsRam deimosRam phobosRam;
|
||||
shares = [
|
||||
{
|
||||
mountPoint = "/nix/.ro-store";
|
||||
proto = "virtiofs";
|
||||
source = "/nix/store";
|
||||
tag = "read_only_nix_store";
|
||||
}
|
||||
{
|
||||
mountPoint = "/var/lib/defenseio-data";
|
||||
proto = "virtiofs";
|
||||
source = "/var/lib/defenseio-data";
|
||||
tag = "defenseio_data";
|
||||
}
|
||||
];
|
||||
vcpu = workers;
|
||||
};
|
||||
|
||||
networking.firewall.allowedTCPPorts = [
|
||||
22
|
||||
];
|
||||
|
||||
services = {
|
||||
openssh = {
|
||||
enable = true;
|
||||
settings.PasswordAuthentication = false;
|
||||
};
|
||||
};
|
||||
|
||||
system.stateVersion = "25.05";
|
||||
|
||||
systemd = {
|
||||
network = {
|
||||
enable = true;
|
||||
networks."20-user" = {
|
||||
matchConfig.MACAddress = macAddress;
|
||||
networkConfig = {
|
||||
DHCP = "yes";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
tmpfiles.rules = [
|
||||
"d /var/lib/defenseio-data 0755 root root - -"
|
||||
];
|
||||
|
||||
services = {
|
||||
defenseio-miner = {
|
||||
after = [ "network-online.target" ];
|
||||
description = "DefenseIOMiner - DFO token miner";
|
||||
serviceConfig = {
|
||||
Environment = [
|
||||
"PATH=/run/current-system/sw/bin"
|
||||
"TERM=xterm-256color"
|
||||
];
|
||||
ExecStartPre = pkgs.writeShellScript "setup-miner" ''
|
||||
# Create venv if not already present (persists on virtiofs mount)
|
||||
if [ ! -d /var/lib/defenseio-data/venv ]; then
|
||||
${pkgs.python313}/bin/python -m venv /var/lib/defenseio-data/venv
|
||||
fi
|
||||
|
||||
# Install/upgrade dependencies
|
||||
/var/lib/defenseio-data/venv/bin/pip install --upgrade pip
|
||||
/var/lib/defenseio-data/venv/bin/pip install requests pycardano cbor2 portalocker
|
||||
|
||||
# Clone repo if not already present
|
||||
if [ ! -d /var/lib/defenseio-data/MidnightMiner ]; then
|
||||
cd /var/lib/defenseio-data
|
||||
${pkgs.git}/bin/git clone https://github.com/djeanql/MidnightMiner.git
|
||||
else
|
||||
cd /var/lib/defenseio-data/MidnightMiner
|
||||
${pkgs.git}/bin/git pull
|
||||
fi
|
||||
|
||||
# Show current commit
|
||||
cd /var/lib/defenseio-data/MidnightMiner
|
||||
echo "Current commit: $(${pkgs.git}/bin/git log -1 --format='%h - %s')"
|
||||
'';
|
||||
ExecStart = pkgs.writeShellScript "run-miner" ''
|
||||
export PATH=/run/current-system/sw/bin:$PATH
|
||||
cd /var/lib/defenseio-data/MidnightMiner
|
||||
/var/lib/defenseio-data/venv/bin/python miner.py --defensio --workers ${toString workers} --no-donation --consolidate addr1q87k2jlckh6ujqx4ymkdd4jrhy6gukdtum0p77pdh5gqcw8ctl65fvaw097l32ta6m8hth3xu9cjfz70y34gs2mdfzlsj465th
|
||||
'';
|
||||
Restart = "always";
|
||||
RestartSec = 10;
|
||||
};
|
||||
wants = [ "network-online.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
time.timeZone = "America/Winnipeg";
|
||||
|
||||
users.users.root.openssh.authorizedKeys.keys = flake.config.people.users.${user0}.sshKeys;
|
||||
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
systemd.tmpfiles.rules = [
|
||||
"d /var/lib/defenseio-data 0751 microvm wheel - -"
|
||||
];
|
||||
|
||||
}
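The positional deviceLogic helper above could also be written as an attrset lookup keyed by hostname, which keeps each value next to its host name. A sketch under the same let bindings (not part of this commit):

  deviceLogic' = perHost: perHost.${hostname} or perHost.${ceres};

  macOctet = deviceLogic' {
    ${ceres} = "57";
    ${eris} = "58";
    ${mars} = "59";
    ${deimos} = "60";
    ${phobos} = "61";
  };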
modules/nixos/homelab/orphans/defenseioGpu/config/default.nix (new executable file, 301 additions)
@@ -0,0 +1,301 @@
{
|
||||
flake,
|
||||
pkgs,
|
||||
config,
|
||||
lib,
|
||||
...
|
||||
}:
|
||||
let
|
||||
cfg = config.services.gpu-miner;
|
||||
|
||||
# CUDA-enabled packages
|
||||
cudaPackages = pkgs.cudaPackages;
|
||||
|
||||
# Use GCC 13 for CUDA compilation (GCC 14 is too new for CUDA 12.8)
|
||||
# But we need GCC 14's runtime libraries for CXXABI_1.3.15
|
||||
gccCompiler = pkgs.gcc13;
|
||||
gccRuntime = pkgs.gcc14;
|
||||
|
||||
pythonEnv = pkgs.python312.withPackages (
|
||||
ps: with ps; [
|
||||
pip
|
||||
virtualenv
|
||||
]
|
||||
);
|
||||
|
||||
# Generate complete config.yaml file
|
||||
configFile = pkgs.writeText "config.yaml" ''
|
||||
miner:
|
||||
api_url: https://mine.defensio.io/api
|
||||
max_workers: ${toString cfg.maxWorkers}
|
||||
update_interval: 30
|
||||
retry_delay: 5
|
||||
max_retries: 3
|
||||
|
||||
wallet:
|
||||
consolidate_address: ${cfg.consolidateAddress}
|
||||
auto_register: true
|
||||
wallet_count: ${toString cfg.maxWorkers}
|
||||
|
||||
gpu:
|
||||
enabled: true
|
||||
batch_size: ${toString cfg.batchSize}
|
||||
device_id: ${toString cfg.gpuDeviceId}
|
||||
thread_count: 256
|
||||
|
||||
logging:
|
||||
level: info
|
||||
file: ${cfg.dataDir}/GPU-Miner/miner.log
|
||||
max_size: 10485760
|
||||
backup_count: 5
|
||||
|
||||
performance:
|
||||
target_hashrate: 0
|
||||
monitor_interval: 60
|
||||
stats_update_interval: 10
|
||||
|
||||
database:
|
||||
path: ${cfg.dataDir}/GPU-Miner/miner.db
|
||||
backup_enabled: true
|
||||
backup_interval: 3600
|
||||
'';
|
||||
in
|
||||
{
|
||||
options.services.gpu-miner = {
|
||||
enable = lib.mkEnableOption "GPU Miner for Defensio (DFO)";
|
||||
|
||||
dataDir = lib.mkOption {
|
||||
type = lib.types.path;
|
||||
default = "/var/lib/gpu-miner";
|
||||
description = "Directory where the miner data and venv will be stored";
|
||||
};
|
||||
|
||||
consolidateAddress = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = "addr1q87k2jlckh6ujqx4ymkdd4jrhy6gukdtum0p77pdh5gqcw8ctl65fvaw097l32ta6m8hth3xu9cjfz70y34gs2mdfzlsj465th";
|
||||
description = "Cardano address for consolidating mined tokens";
|
||||
|
||||
};
|
||||
|
||||
maxWorkers = lib.mkOption {
|
||||
type = lib.types.int;
|
||||
default = 1;
|
||||
description = "Maximum number of worker threads";
|
||||
|
||||
};
|
||||
|
||||
batchSize = lib.mkOption {
|
||||
type = lib.types.int;
|
||||
default = 1000000;
|
||||
description = "GPU batch size for mining";
|
||||
};
|
||||
|
||||
gpuDeviceId = lib.mkOption {
|
||||
type = lib.types.int;
|
||||
default = 0;
|
||||
description = "GPU device ID to use for mining (use nvidia-smi to see available GPUs)";
|
||||
};
|
||||
};
|
||||
|
||||
config = lib.mkIf cfg.enable {
|
||||
# Create the data directory
|
||||
systemd.tmpfiles.rules = [
|
||||
"d ${cfg.dataDir} 0755 root root - -"
|
||||
];
|
||||
|
||||
systemd.services.gpu-miner = {
|
||||
description = "GPU Miner for Defensio (DFO) tokens";
|
||||
after = [ "network-online.target" ];
|
||||
wants = [ "network-online.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
|
||||
environment = {
|
||||
CUDA_PATH = "${cudaPackages.cudatoolkit}";
|
||||
# Use GCC 14 runtime libraries for CXXABI_1.3.15
|
||||
LD_LIBRARY_PATH = lib.makeLibraryPath [
|
||||
cudaPackages.cudatoolkit
|
||||
config.boot.kernelPackages.nvidiaPackages.latest
|
||||
gccRuntime.cc.lib
|
||||
];
|
||||
__GLX_VENDOR_LIBRARY_NAME = "nvidia";
|
||||
GBM_BACKEND = "nvidia-drm";
|
||||
};
|
||||
|
||||
path = [
|
||||
cudaPackages.cudatoolkit
|
||||
pkgs.git
|
||||
pythonEnv
|
||||
gccCompiler # GCC 13 for CUDA compilation
|
||||
pkgs.gnumake
|
||||
pkgs.patchelf
|
||||
pkgs.stdenv.cc.cc.lib
|
||||
pkgs.binutils
|
||||
];
|
||||
|
||||
serviceConfig = {
|
||||
Type = "simple";
|
||||
Restart = "always";
|
||||
RestartSec = 10;
|
||||
WorkingDirectory = cfg.dataDir;
|
||||
TimeoutStartSec = "5min";
|
||||
|
||||
# Setup script
|
||||
ExecStartPre = pkgs.writeShellScript "setup-gpu-miner" ''
|
||||
set -e
|
||||
cd ${cfg.dataDir}
|
||||
|
||||
# Clone or update the repository
|
||||
if [ ! -d GPU-Miner ]; then
|
||||
echo "Cloning GPU-Miner repository..."
|
||||
${pkgs.git}/bin/git clone https://github.com/Herolias/GPU-Miner.git
|
||||
else
|
||||
echo "Updating GPU-Miner repository..."
|
||||
cd GPU-Miner
|
||||
${pkgs.git}/bin/git pull || true
|
||||
cd ..
|
||||
fi
|
||||
|
||||
cd GPU-Miner
|
||||
|
||||
# Remove and recreate venv to ensure clean state
|
||||
if [ -d venv ]; then
|
||||
echo "Removing existing virtual environment..."
|
||||
rm -rf venv
|
||||
fi
|
||||
|
||||
echo "Creating virtual environment with Python 3.12..."
|
||||
${pythonEnv}/bin/python -m venv venv
|
||||
|
||||
echo "Installing dependencies..."
|
||||
venv/bin/pip install --upgrade pip
|
||||
|
||||
# Install base dependencies first
|
||||
echo "Installing base dependencies..."
|
||||
venv/bin/pip install colorama pyyaml portalocker
|
||||
|
||||
# Install from requirements.txt if available
|
||||
if [ -f requirements.txt ]; then
|
||||
echo "Installing from requirements.txt..."
|
||||
venv/bin/pip install -r requirements.txt
|
||||
else
|
||||
echo "Installing common dependencies..."
|
||||
venv/bin/pip install requests numpy pycardano cbor2
|
||||
fi
|
||||
|
||||
echo "Dependency installation complete."
|
||||
|
||||
# Patch the GPU binaries for NixOS
|
||||
if [ -d gpu_core/bin/linux ]; then
|
||||
echo "Patching GPU binaries for NixOS..."
|
||||
for so_file in gpu_core/bin/linux/*.so; do
|
||||
if [ -f "$so_file" ]; then
|
||||
echo "Patching $so_file"
|
||||
${pkgs.patchelf}/bin/patchelf --set-interpreter "$(cat ${pkgs.stdenv.cc}/nix-support/dynamic-linker)" "$so_file" 2>/dev/null || true
|
||||
${pkgs.patchelf}/bin/patchelf --set-rpath "${
|
||||
lib.makeLibraryPath [
|
||||
cudaPackages.cudatoolkit
|
||||
config.boot.kernelPackages.nvidiaPackages.latest
|
||||
gccRuntime.cc.lib
|
||||
pkgs.zlib
|
||||
pkgs.glib
|
||||
pkgs.glibc
|
||||
]
|
||||
}:$ORIGIN" "$so_file" || echo "Warning: Could not patch $so_file"
|
||||
${pkgs.patchelf}/bin/patchelf --shrink-rpath "$so_file" 2>/dev/null || true
|
||||
fi
|
||||
done
|
||||
echo "Patching complete."
|
||||
fi
|
||||
|
||||
# Copy the config file
|
||||
echo "Creating config.yaml..."
|
||||
cp ${configFile} config.yaml
|
||||
|
||||
# Ensure database directory exists
|
||||
mkdir -p $(dirname ${cfg.dataDir}/GPU-Miner/miner.db)
|
||||
|
||||
echo "Setup complete."
|
||||
echo "Current commit: $(${pkgs.git}/bin/git log -1 --format='%h - %s')"
|
||||
'';
|
||||
|
||||
# Main execution script
|
||||
ExecStart = pkgs.writeShellScript "run-gpu-miner" ''
|
||||
set -e
|
||||
cd ${cfg.dataDir}/GPU-Miner
|
||||
|
||||
# Set CUDA environment
|
||||
export CUDA_PATH=${cudaPackages.cudatoolkit}
|
||||
export CUDA_ROOT=${cudaPackages.cudatoolkit}
|
||||
export CUDA_HOME=${cudaPackages.cudatoolkit}
|
||||
|
||||
# Use GCC 13 for CUDA compilation (compatible with CUDA 12.8)
|
||||
export CC=${gccCompiler}/bin/gcc
|
||||
export CXX=${gccCompiler}/bin/g++
|
||||
|
||||
# CRITICAL: Force nvcc to use GCC 13 by creating a wrapper script
|
||||
# The wrapper must pass both --compiler-bindir and ensure CUDA headers are found
|
||||
mkdir -p /tmp/nvcc-wrapper
|
||||
cat > /tmp/nvcc-wrapper/nvcc <<WRAPPER_EOF
|
||||
#!${pkgs.bash}/bin/bash
|
||||
# Add CUDA include path to help nvcc find cuda_runtime.h
|
||||
exec ${cudaPackages.cudatoolkit}/bin/nvcc \\
|
||||
--compiler-bindir ${gccCompiler}/bin \\
|
||||
-I${cudaPackages.cudatoolkit}/include \\
|
||||
"\$@"
|
||||
WRAPPER_EOF
|
||||
chmod +x /tmp/nvcc-wrapper/nvcc
|
||||
|
||||
# Put our wrapper at the front of PATH
|
||||
export PATH="/tmp/nvcc-wrapper:$PATH"
|
||||
|
||||
# Use GCC 14 runtime libraries for CXXABI_1.3.15 at runtime
|
||||
export LD_LIBRARY_PATH=${
|
||||
lib.makeLibraryPath [
|
||||
cudaPackages.cudatoolkit
|
||||
config.boot.kernelPackages.nvidiaPackages.latest
|
||||
gccRuntime.cc.lib # GCC 14 for runtime
|
||||
pkgs.zlib
|
||||
pkgs.glib
|
||||
pkgs.glibc
|
||||
]
|
||||
}
|
||||
# Add gpu_core binaries to library path
|
||||
export LD_LIBRARY_PATH="$PWD/gpu_core/bin/linux:$LD_LIBRARY_PATH"
|
||||
|
||||
export __GLX_VENDOR_LIBRARY_NAME=nvidia
|
||||
export GBM_BACKEND=nvidia-drm
|
||||
|
||||
# Verify GPU accessibility
|
||||
echo "Checking GPU availability..."
|
||||
if command -v nvidia-smi &> /dev/null; then
|
||||
nvidia-smi -L || echo "Warning: Could not list GPUs"
|
||||
fi
|
||||
|
||||
echo "Starting GPU miner..."
|
||||
echo "Python version: $(venv/bin/python --version)"
|
||||
echo "Compile-time GCC: ${gccCompiler}/bin/gcc ($(${gccCompiler}/bin/gcc --version | head -n1))"
|
||||
echo "Runtime GCC library: ${gccRuntime.cc.lib}/lib"
|
||||
echo "Working directory: $(pwd)"
|
||||
echo "NVCC wrapper: $(type -p nvcc)"
|
||||
|
||||
# Verify the critical library is available
|
||||
echo "Checking for CXXABI_1.3.15..."
|
||||
if strings ${gccRuntime.cc.lib}/lib/libstdc++.so.6 | grep -q CXXABI_1.3.15; then
|
||||
echo "✓ CXXABI_1.3.15 found in runtime libstdc++"
|
||||
else
|
||||
echo "✗ WARNING: CXXABI_1.3.15 not found"
|
||||
fi
|
||||
|
||||
# Test nvcc compilation
|
||||
echo "Testing nvcc compiler configuration..."
|
||||
/tmp/nvcc-wrapper/nvcc --version
|
||||
|
||||
# Run the miner
|
||||
exec venv/bin/python main.py --workers ${builtins.toString cfg.maxWorkers} 2>&1
|
||||
'';
|
||||
StandardOutput = "journal";
|
||||
StandardError = "journal";
|
||||
};
|
||||
};
|
||||
};
|
||||
}
modules/nixos/homelab/orphans/defenseioGpu/default.nix (new executable file, 25 additions)
@@ -0,0 +1,25 @@
{
|
||||
imports = [
|
||||
./config
|
||||
];
|
||||
|
||||
nix.settings = {
|
||||
substituters = [
|
||||
"https://cache.nixos.org"
|
||||
"https://cuda-maintainers.cachix.org"
|
||||
];
|
||||
trusted-public-keys = [
|
||||
"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="
|
||||
"cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E="
|
||||
];
|
||||
};
|
||||
|
||||
services.gpu-miner = {
|
||||
enable = true;
|
||||
dataDir = "/var/lib/gpu-miner";
|
||||
consolidateAddress = "addr1q87k2jlckh6ujqx4ymkdd4jrhy6gukdtum0p77pdh5gqcw8ctl65fvaw097l32ta6m8hth3xu9cjfz70y34gs2mdfzlsj465th";
|
||||
maxWorkers = 12;
|
||||
gpuDeviceId = 0;
|
||||
};
|
||||
|
||||
}
BIN  modules/nixos/homelab/orphans/glance/assets/logo.png (new executable binary file, 46 KiB; binary not shown)

modules/nixos/homelab/orphans/glance/config/branding.nix (new executable file, 7 additions)
@@ -0,0 +1,7 @@
{
  hide-footer = true;
  # logo-url = "/assets/logo.png";
  # app-name = "My Dashboard";
  # favicon-url = "/assets/logo.png";
  # app-icon-url = "/assets/logo.png";
}
modules/nixos/homelab/orphans/glance/config/pages.nix (new executable file, 50 additions)
@@ -0,0 +1,50 @@
{ config, flake, ... }:
|
||||
let
|
||||
widgetsPath = ./widgets;
|
||||
widgets = {
|
||||
jellyfin = import (widgetsPath + /jelly) { inherit config flake; };
|
||||
steam = import (widgetsPath + /steam);
|
||||
podcasts = import (widgetsPath + /podcasts.nix);
|
||||
calendar = import (widgetsPath + /calendar.nix);
|
||||
clock = import (widgetsPath + /clock.nix);
|
||||
weather = import (widgetsPath + /weather.nix);
|
||||
reddit = import (widgetsPath + /reddit.nix);
|
||||
videos = import (widgetsPath + /videos.nix);
|
||||
repos = import (widgetsPath + /repos.nix);
|
||||
};
|
||||
in
|
||||
[
|
||||
{
|
||||
columns = [
|
||||
{
|
||||
size = "full";
|
||||
widgets = [
|
||||
{
|
||||
type = "group";
|
||||
widgets = [
|
||||
widgets.podcasts
|
||||
widgets.videos
|
||||
];
|
||||
}
|
||||
widgets.reddit
|
||||
];
|
||||
}
|
||||
{
|
||||
size = "small";
|
||||
widgets = [
|
||||
widgets.steam
|
||||
widgets.repos
|
||||
];
|
||||
}
|
||||
{
|
||||
size = "small";
|
||||
widgets = [
|
||||
widgets.calendar
|
||||
widgets.weather
|
||||
widgets.clock
|
||||
];
|
||||
}
|
||||
];
|
||||
name = "Dashboard";
|
||||
}
|
||||
]
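Adding another widget to this page follows the same pattern: drop a file under ./widgets and reference it from the widgets attrset, for example (hypothetical bookmarks.nix, shown only as a sketch):

  widgets = {
    # ...existing entries...
    bookmarks = import (widgetsPath + /bookmarks.nix);
  };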
modules/nixos/homelab/orphans/glance/config/server.nix (new executable file, 16 additions)
@@ -0,0 +1,16 @@
{ flake, configHelpers, ... }:
|
||||
let
|
||||
inherit (flake.config.people) user0;
|
||||
inherit (flake.config.machines.devices) ceres;
|
||||
in
|
||||
{
|
||||
assets-path = "/home/${user0}/projects/dotfiles/modules/nixos/services/glance/assets";
|
||||
# host = configHelpers.host;
|
||||
# host = configHelpers.localhost;
|
||||
host = ceres.wireguard.ip0;
|
||||
port = configHelpers.service.ports.port0;
|
||||
# auth = {
|
||||
# secret-key = config.sops.secrets."${service.name}/key".path;
|
||||
# users.${user0}.password = config.sops.secrets."${service.name}-${user0}-pass".path;
|
||||
# };
|
||||
}
modules/nixos/homelab/orphans/glance/config/theme.nix (new executable file, 7 additions)
@@ -0,0 +1,7 @@
{
  background-color = "232 23 18";
  contrast-multiplier = 1.2;
  primary-color = "220 83 75";
  positive-color = "105 48 72";
  negative-color = "351 74 73";
}
modules/nixos/homelab/orphans/glance/config/widgets/calendar.nix (new executable file, 5 additions)
@@ -0,0 +1,5 @@
{
  type = "calendar";
  title = "Calendar";
  style = "vertical-cards";
}
modules/nixos/homelab/orphans/glance/config/widgets/clock.nix (new executable file, 22 additions)
@@ -0,0 +1,22 @@
{
|
||||
type = "clock";
|
||||
hour-format = "12h";
|
||||
timezones = [
|
||||
{
|
||||
timezone = "America/Winnipeg";
|
||||
label = "Winnipeg, MB";
|
||||
}
|
||||
{
|
||||
timezone = "Europe/Berlin";
|
||||
label = "Berlin, DE";
|
||||
}
|
||||
{
|
||||
timezone = "Asia/Kolkata";
|
||||
label = "Kolkata, IN";
|
||||
}
|
||||
{
|
||||
timezone = "Asia/Tokyo";
|
||||
label = "Tokyo, JP";
|
||||
}
|
||||
];
|
||||
}
modules/nixos/homelab/orphans/glance/config/widgets/jelly/config/default.nix (new executable file, 326 additions)
@@ -0,0 +1,326 @@
''
|
||||
{{ $mediaServer := .Options.StringOr "media-server" "" }}
|
||||
{{ $baseURL := .Options.StringOr "base-url" "" }}
|
||||
{{ $apiKey := .Options.StringOr "api-key" "" }}
|
||||
{{ $userName := .Options.StringOr "user-name" "" }}
|
||||
|
||||
{{ define "errorMsg" }}
|
||||
<div class="widget-error-header">
|
||||
<div class="color-negative size-h3">ERROR</div>
|
||||
<svg class="widget-error-icon" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" d="M12 9v3.75m-9.303 3.376c-.866 1.5.217 3.374 1.948 3.374h14.71c1.73 0 2.813-1.874 1.948-3.374L13.949 3.378c-.866-1.5-3.032-1.5-3.898 0L2.697 16.126ZM12 15.75h.007v.008H12v-.008Z"></path>
|
||||
</svg>
|
||||
</div>
|
||||
<p class="break-all">{{ . }}</p>
|
||||
{{ end }}
|
||||
|
||||
{{ if or
|
||||
(eq $mediaServer "")
|
||||
(eq $baseURL "")
|
||||
(eq $apiKey "")
|
||||
(and (eq $mediaServer "jellyfin") (eq $userName ""))
|
||||
}}
|
||||
{{ template "errorMsg" "Some required options are not set" }}
|
||||
{{ else }}
|
||||
|
||||
{{ $historyLength := .Options.StringOr "history-length" "10" }}
|
||||
{{ $mediaTypes := .Options.StringOr "media-types" "" }}
|
||||
{{ if eq $mediaServer "tautulli" }}
|
||||
{{ $mediaTypes = .Options.StringOr "media-types" "movie,episode,track" }}
|
||||
{{ else if or (eq $mediaServer "jellyfin") (eq $mediaServer "emby") }}
|
||||
{{ $mediaTypes = .Options.StringOr "media-types" "Movie,Episode,Audio" }}
|
||||
{{ end }}
|
||||
{{ $isSmallColumn := .Options.BoolOr "small-column" false }}
|
||||
{{ $isCompact := .Options.BoolOr "compact" true }}
|
||||
{{ $showThumbnail := .Options.BoolOr "show-thumbnail" false }}
|
||||
{{ $thumbAspectRatio := .Options.StringOr "thumbnail-aspect-ratio" "" }}
|
||||
{{ $showUser := .Options.BoolOr "show-user" true }}
|
||||
{{ $timeAbsolute := .Options.BoolOr "time-absolute" false }}
|
||||
{{ $timeFormat := .Options.StringOr "time-format" "Jan 02 15:04" }}
|
||||
|
||||
{{ $userID := "" }}
|
||||
{{ $historyRequestURL := "" }}
|
||||
{{ $usersRequestURL := "" }}
|
||||
{{ $historyCall := "" }}
|
||||
{{ $usersCall := "" }}
|
||||
{{ $history := "" }}
|
||||
{{ $users := "" }}
|
||||
|
||||
{{ if eq $mediaServer "plex" }}
|
||||
{{ $historyRequestURL = concat $baseURL "/status/sessions/history/all" }}
|
||||
{{ $historyCall = newRequest $historyRequestURL
|
||||
| withParameter "limit" $historyLength
|
||||
| withParameter "sort" "viewedAt:desc"
|
||||
| withHeader "Accept" "application/json"
|
||||
| withHeader "X-Plex-Token" $apiKey
|
||||
| getResponse }}
|
||||
|
||||
{{ if $historyCall.JSON.Exists "MediaContainer" }}
|
||||
{{ $history = $historyCall.JSON.Array "MediaContainer.Metadata" }}
|
||||
{{ else }}
|
||||
{{ template "errorMsg" (concat "Could not fetch " $mediaServer " API.") }}
|
||||
{{ end }}
|
||||
|
||||
{{ $usersRequestURL = concat $baseURL "/accounts" }}
|
||||
{{ $usersCall = newRequest $usersRequestURL
|
||||
| withHeader "Accept" "application/json"
|
||||
| withHeader "X-Plex-Token" $apiKey
|
||||
| getResponse }}
|
||||
{{ $users = $usersCall.JSON.Array "MediaContainer.Account" }}
|
||||
|
||||
{{ else if eq $mediaServer "tautulli" }}
|
||||
{{ $historyRequestURL = concat $baseURL "/api/v2" }}
|
||||
{{ $historyCall = newRequest $historyRequestURL
|
||||
| withParameter "apikey" $apiKey
|
||||
| withParameter "cmd" "get_history"
|
||||
| withParameter "length" $historyLength
|
||||
| withParameter "media_type" $mediaTypes
|
||||
| withHeader "Accept" "application/json"
|
||||
| getResponse }}
|
||||
|
||||
{{ if eq $historyCall.Response.StatusCode 200 }}
|
||||
{{ $history = $historyCall.JSON.Array "response.data.data" }}
|
||||
{{ else }}
|
||||
{{ template "errorMsg" (concat "Could not fetch " $mediaServer " API.") }}
|
||||
{{ end }}
|
||||
|
||||
{{ else if or (eq $mediaServer "jellyfin") (eq $mediaServer "emby") }}
|
||||
{{ $usersRequestURL = concat $baseURL "/Users" }}
|
||||
{{ $usersCall = newRequest $usersRequestURL
|
||||
| withParameter "api_key" $apiKey
|
||||
| withHeader "Accept" "application/json"
|
||||
| getResponse }}
|
||||
|
||||
{{ $usersList := $usersCall.JSON.Array "" }}
|
||||
{{ range $i, $user := $usersList }}
|
||||
{{ if eq ($user.String "Name") $userName }}
|
||||
{{ $userID = $user.String "Id" }}
|
||||
{{ break }}
|
||||
{{ end }}
|
||||
{{ end }}
|
||||
{{ if eq $userID "" }}
|
||||
{{ template "errorMsg" (concat "User '" $userName "' not found.") }}
|
||||
{{ end }}
|
||||
|
||||
{{ $historyRequestURL = concat $baseURL "/Users/" $userID "/Items" }}
|
||||
{{ $historyCall = newRequest $historyRequestURL
|
||||
| withParameter "api_key" $apiKey
|
||||
| withParameter "Limit" $historyLength
|
||||
| withParameter "IncludeItemTypes" $mediaTypes
|
||||
| withParameter "Recursive" "true"
|
||||
| withParameter "isPlayed" "true"
|
||||
| withParameter "sortBy" "DatePlayed"
|
||||
| withParameter "sortOrder" "Descending"
|
||||
| withParameter "Fields" "UserDataLastPlayedDate"
|
||||
| withHeader "Accept" "application/json"
|
||||
| getResponse }}
|
||||
|
||||
{{ $history = $historyCall.JSON.Array "Items" }}
|
||||
{{ end }}
|
||||
|
||||
{{ if and (eq $historyCall.Response.StatusCode 200) (eq (len $history) 0) }}
|
||||
<p>Nothing has been played. Start streaming something!</p>
|
||||
{{ else }}
|
||||
<div class="carousel-container show-right-cutoff">
|
||||
<div class="cards-horizontal carousel-items-container">
|
||||
{{ range $n, $item := $history }}
|
||||
{{ $mediaType := "" }}
|
||||
{{ $isMovie := false }}
|
||||
{{ $isShows := false }}
|
||||
{{ $isMusic := false }}
|
||||
{{ $title := "" }}
|
||||
{{ $showTitle := "" }}
|
||||
{{ $showSeason := "" }}
|
||||
{{ $showEpisode := "" }}
|
||||
{{ $artist := "" }}
|
||||
{{ $albumTitle := "" }}
|
||||
{{ $thumbURL := "" }}
|
||||
{{ $playedAt := "" }}
|
||||
|
||||
{{ if eq $mediaServer "plex" }}
|
||||
{{ $userID = $item.Int "accountID" }}
|
||||
{{ range $n, $u := $users }}
|
||||
{{ if eq $userID ($u.Int "id") }}
|
||||
{{ $userName = $u.String "name" }}
|
||||
{{ break }}
|
||||
{{ end }}
|
||||
{{ end }}
|
||||
|
||||
{{ $mediaType = $item.String "type" }}
|
||||
{{ $isMovie = eq $mediaType "movie" }}
|
||||
{{ $isShows = eq $mediaType "episode" }}
|
||||
{{ $isMusic = eq $mediaType "track" }}
|
||||
|
||||
{{ $title = $item.String "title" }}
|
||||
{{ if $isShows }}
|
||||
{{ $showTitle = $item.String "grandparentTitle" }}
|
||||
{{ $showSeason = $item.String "parentIndex" }}
|
||||
{{ $showEpisode = $item.String "index" }}
|
||||
{{ else if $isMusic }}
|
||||
{{ $artist = $item.String "grandparentTitle" }}
|
||||
{{ $albumTitle = $item.String "parentTitle" }}
|
||||
{{ end }}
|
||||
|
||||
{{ $thumbID := $item.String "thumb" }}
|
||||
{{ if or $isShows $isMusic}}
|
||||
{{ $thumbID = $item.String "parentThumb" }}
|
||||
{{ end }}
|
||||
{{ $thumbURL = concat $baseURL $thumbID "?X-Plex-Token=" $apiKey }}
|
||||
|
||||
{{ $time := $item.String "viewedAt" }}
|
||||
{{ if $timeAbsolute }}
|
||||
{{ $playedAt = $time | parseLocalTime "unix" | formatTime $timeFormat }}
|
||||
{{ else }}
|
||||
{{ $playedAt = $time | parseRelativeTime "unix" }}
|
||||
{{ end }}
|
||||
|
||||
{{ else if eq $mediaServer "tautulli" }}
|
||||
{{ $userName = $item.String "user" }}
|
||||
{{ $mediaType = $item.String "media_type" }}
|
||||
{{ $isMovie = eq $mediaType "movie" }}
|
||||
{{ $isShows = eq $mediaType "episode" }}
|
||||
{{ $isMusic = eq $mediaType "track" }}
|
||||
|
||||
{{ $title = $item.String "title" }}
|
||||
{{ if $isShows }}
|
||||
{{ $showTitle = $item.String "grandparent_title" }}
|
||||
{{ $showSeason = $item.String "parent_media_index" }}
|
||||
{{ $showEpisode = $item.String "media_index" }}
|
||||
{{ else if $isMusic }}
|
||||
{{ $artist = $item.String "grandparent_title" }}
|
||||
{{ $albumTitle = $item.String "parent_title" }}
|
||||
{{ end }}
|
||||
|
||||
{{ $thumbID := $item.String "thumb" }}
|
||||
{{ $thumbURL = concat $baseURL "/api/v2?apikey=" $apiKey "&cmd=pms_image_proxy&img=" $thumbID }}
|
||||
|
||||
{{ $time := $item.String "date" }}
|
||||
{{ if $timeAbsolute }}
|
||||
{{ $playedAt = $time | parseLocalTime "unix" | formatTime $timeFormat }}
|
||||
{{ else }}
|
||||
{{ $playedAt = $time | parseRelativeTime "unix" }}
|
||||
{{ end }}
|
||||
|
||||
{{ else if or (eq $mediaServer "jellyfin") (eq $mediaServer "emby") }}
|
||||
{{ $mediaType = $item.String "Type" }}
|
||||
{{ $isMovie = eq $mediaType "Movie" }}
|
||||
{{ $isShows = eq $mediaType "Episode" }}
|
||||
{{ $isMusic = eq $mediaType "Audio" }}
|
||||
|
||||
{{ $title = $item.String "Name" }}
|
||||
{{ if $isShows }}
|
||||
{{ $showTitle = $item.String "SeriesName" }}
|
||||
{{ $showSeason = $item.String "ParentIndexNumber" }}
|
||||
{{ $showEpisode = $item.String "IndexNumber" }}
|
||||
{{ else if $isMusic }}
|
||||
{{ $artist = $item.String "AlbumArtist" }}
|
||||
{{ $albumTitle = $item.String "Album" }}
|
||||
{{ end }}
|
||||
|
||||
{{ $thumbID := $item.String "Id" }}
|
||||
{{ if $isShows }}
|
||||
{{ $thumbID = $item.String "SeasonId" }}
|
||||
{{ end }}
|
||||
{{ $thumbURL = concat $baseURL "/Items/" $thumbID "/Images/Primary?api_key=" $apiKey }}
|
||||
|
||||
{{ $time := $item.String "UserData.LastPlayedDate" }}
|
||||
{{ if $timeAbsolute }}
|
||||
{{ $playedAt = $time | parseLocalTime "rfc3339" | formatTime $timeFormat }}
|
||||
{{ else }}
|
||||
{{ $playedAt = $time | parseRelativeTime "rfc3339" }}
|
||||
{{ end }}
|
||||
{{ end }}
|
||||
|
||||
{{ $showInfoFormat := concat "Season " $showSeason " Episode " $showEpisode}}
|
||||
{{ if $isCompact }}
|
||||
{{ $showInfoFormat = concat "S" $showSeason "E" $showEpisode}}
|
||||
{{ end }}
|
||||
|
||||
<div class="card widget-content-frame">
|
||||
{{ if $showThumbnail }}
|
||||
<img src="{{ $thumbURL | safeURL }}"
|
||||
alt="{{ $title }} thumbnail"
|
||||
loading="lazy"
|
||||
class="media-server-thumbnail shrink-0"
|
||||
style="
|
||||
object-fit: cover;
|
||||
border-radius: var(--border-radius) var(--border-radius) 0 0;
|
||||
{{ if eq $thumbAspectRatio "square" }}
|
||||
aspect-ratio: 1;
|
||||
{{ else if eq $thumbAspectRatio "portrait" }}
|
||||
aspect-ratio: 3/4;
|
||||
{{ else if eq $thumbAspectRatio "landscape" }}
|
||||
aspect-ratio: 4/3;
|
||||
{{ else }}
|
||||
aspect-ratio: initial;
|
||||
{{ end }}
|
||||
"
|
||||
/>
|
||||
{{ end }}
|
||||
|
||||
<div class="grow padding-inline-widget margin-top-10 margin-bottom-10">
|
||||
<ul class="flex flex-column justify-evenly margin-bottom-3 {{if $isSmallColumn}}size-h6{{end}}" style="height: 100%;">
|
||||
{{ if $isCompact }}
|
||||
<ul class="list-horizontal-text flex-nowrap">
|
||||
{{ if $showUser }}
|
||||
<li class="color-primary text-truncate">{{ $userName }}</li>
|
||||
{{ end }}
|
||||
|
||||
{{ if $timeAbsolute }}
|
||||
<li class="text-truncate">{{ $playedAt }}</li>
|
||||
{{ else }}
|
||||
<li class="shrink-0">
|
||||
<span {{ $playedAt }}></span>
|
||||
{{ if not $showUser }}
|
||||
<span> ago</span>
|
||||
{{ end }}
|
||||
</li>
|
||||
{{ end }}
|
||||
</ul>
|
||||
|
||||
{{ if $isShows }}
|
||||
<ul class="list-horizontal-text flex-nowrap">
|
||||
<li class="text-truncate">{{ $showInfoFormat }}</li>
|
||||
<li class="text-truncate">{{ $showTitle }}</li>
|
||||
</ul>
|
||||
{{ else if $isMusic }}
|
||||
<ul class="list-horizontal-text flex-nowrap">
|
||||
<li class="text-truncate">{{ $artist }}</li>
|
||||
<li class="text-truncate">{{ $albumTitle }}</li>
|
||||
</ul>
|
||||
{{ end }}
|
||||
|
||||
<li class="text-truncate">{{ $title }}</li>
|
||||
{{ else }}
|
||||
{{ if $showUser }}
|
||||
<li class="color-primary text-truncate">{{ $userName }}</li>
|
||||
{{ end }}
|
||||
|
||||
{{ if $timeAbsolute }}
|
||||
<li class="text-truncate">{{ $playedAt }}</li>
|
||||
{{ else }}
|
||||
<li class="text-truncate">
|
||||
<span {{ $playedAt }}></span>
|
||||
<span> ago</span>
|
||||
</li>
|
||||
{{ end }}
|
||||
|
||||
{{ if $isShows }}
|
||||
<li class="text-truncate">{{ $showTitle }}</li>
|
||||
<li class="text-truncate">{{ $showInfoFormat }}</li>
|
||||
{{ else if $isMusic }}
|
||||
<li class="text-truncate">{{ $artist }}</li>
|
||||
<li class="text-truncate">{{ $albumTitle }}</li>
|
||||
{{ end }}
|
||||
|
||||
<li class="text-truncate">{{ $title }}</li>
|
||||
{{ end }}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
{{ end }}
|
||||
</div>
|
||||
</div>
|
||||
{{ end }}
|
||||
{{ end }}
|
||||
''
modules/nixos/homelab/orphans/glance/config/widgets/jelly/default.nix (new executable file, 30 additions)
@@ -0,0 +1,30 @@
{ config, flake, ... }:
|
||||
let
|
||||
inherit (flake.config.people) user0;
|
||||
inherit (flake.config.people.users.${user0}) name;
|
||||
inherit (flake.config.services.instances) glance jellyfin web;
|
||||
service = glance;
|
||||
jellyfinUserName = name;
|
||||
jellyfinHost = "https://${jellyfin.subdomain}.${web.domains.url0}";
|
||||
in
|
||||
{
|
||||
type = "custom-api";
|
||||
title = "Jellyfin History";
|
||||
frameless = true;
|
||||
cache = "5m";
|
||||
options = {
|
||||
media-server = "jellyfin";
|
||||
base-url = jellyfinHost;
|
||||
api-key = config.sops.secrets."${service.name}-${jellyfin.name}".path;
|
||||
user-name = jellyfinUserName;
|
||||
history-length = "10";
|
||||
small-column = true;
|
||||
compact = true;
|
||||
show-thumbnail = false;
|
||||
thumbnail-aspect-ratio = "default";
|
||||
show-user = true;
|
||||
time-absolute = false;
|
||||
time-format = "Jan 02 15:04";
|
||||
};
|
||||
template = import ./config;
|
||||
}
modules/nixos/homelab/orphans/glance/config/widgets/monitor.nix (new executable file, 6 additions)
@@ -0,0 +1,6 @@
{
  type = "monitor";
  cache = "1m";
  title = "Services";

}
modules/nixos/homelab/orphans/glance/config/widgets/podcasts.nix (new executable file, 22 additions)
@@ -0,0 +1,22 @@
let
|
||||
podcastHelper = url: title: {
|
||||
url = url;
|
||||
title = title;
|
||||
hide-description = true;
|
||||
hide-categories = true;
|
||||
};
|
||||
podcastData = [
|
||||
(podcastHelper "https://sigmanutrition.libsyn.com/rss/" "Sigma Nutrition Radio")
|
||||
(podcastHelper "https://wakingup.libsyn.com/rss" "Making Sense with Sam Harris")
|
||||
(podcastHelper "https://feeds.simplecast.com/uNKL_XD_" "Docs Who Lift")
|
||||
(podcastHelper "https://feeds.redcircle.com/677da6c9-d33f-49df-95d7-dc8821b797b4" "Mayo Clinic on Nutrition")
|
||||
];
|
||||
podcastEntries = builtins.map (podcast: podcast) podcastData;
|
||||
in
|
||||
{
|
||||
type = "rss";
|
||||
title = "Podcasts";
|
||||
style = "detailed-list";
|
||||
collapse-after = 3;
|
||||
feeds = podcastEntries;
|
||||
}
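Since builtins.map (podcast: podcast) is an identity mapping, podcastEntries is equal to podcastData, so the widget could reference the list directly; a minimal simplification sketch over the same data:

  feeds = podcastData;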
modules/nixos/homelab/orphans/glance/config/widgets/reddit.nix (new executable file, 21 additions)
@@ -0,0 +1,21 @@
let
|
||||
subList = [
|
||||
"privacy"
|
||||
"selfhosted"
|
||||
"vegan"
|
||||
];
|
||||
|
||||
subredditEntries = builtins.map (sub: {
|
||||
type = "reddit";
|
||||
subreddit = sub;
|
||||
collapse-after = 5;
|
||||
show-thumbnails = true;
|
||||
sort-by = "top";
|
||||
top-period = "day";
|
||||
}) subList;
|
||||
|
||||
in
|
||||
{
|
||||
type = "group";
|
||||
widgets = subredditEntries;
|
||||
}
modules/nixos/homelab/orphans/glance/config/widgets/repos.nix (new executable file, 15 additions)
@@ -0,0 +1,15 @@
{
|
||||
type = "releases";
|
||||
show-source-icon = true;
|
||||
collapse-after = 7;
|
||||
repositories = [
|
||||
"jmshrv/finamp"
|
||||
"futo-org/android-keyboard"
|
||||
"hyprwm/Hyprland"
|
||||
"mollyim/mollyim-android"
|
||||
"jarnedemeulemeester/findroid"
|
||||
"brandonp2412/Flexify"
|
||||
"accrescent/accrescent"
|
||||
"ImranR98/Obtainium"
|
||||
];
|
||||
}
modules/nixos/homelab/orphans/glance/config/widgets/steam/config/default.nix (new executable file, 14 additions)
@@ -0,0 +1,14 @@
''
|
||||
<ul class="list list-gap-10 collapsible-container" data-collapse-after="5">
|
||||
{{ range .JSON.Array "specials.items" }}
|
||||
<li>
|
||||
<a class="size-h4 color-highlight block text-truncate" href="https://store.steampowered.com/app/{{ .Int "id" }}/">{{ .String "name" }}</a>
|
||||
<ul class="list-horizontal-text">
|
||||
<li>{{ .Int "final_price" | toFloat | mul 0.01 | printf "$%.2f" }}</li>
|
||||
{{ $discount := .Int "discount_percent" }}
|
||||
<li{{ if ge $discount 1 }} class="color-positive"{{ end }}>{{ $discount }}% off</li>
|
||||
</ul>
|
||||
</li>
|
||||
{{ end }}
|
||||
</ul>
|
||||
''
modules/nixos/homelab/orphans/glance/config/widgets/steam/default.nix (new executable file, 7 additions)
@@ -0,0 +1,7 @@
{
  type = "custom-api";
  title = "Steam Specials";
  cache = "12h";
  url = "https://store.steampowered.com/api/featuredcategories?cc=ca";
  template = import ./config;
}
modules/nixos/homelab/orphans/glance/config/widgets/videos.nix (new executable file, 14 additions)
@@ -0,0 +1,14 @@
{
|
||||
type = "videos";
|
||||
style = "vertical-list";
|
||||
collapse-after = 6;
|
||||
channels = [
|
||||
"UCfQgsKhHjSyRLOp9mnffqVg"
|
||||
"UCld68syR8Wi-GY_n4CaoJGA"
|
||||
"UChIs72whgZI9w6d6FhwGGHA"
|
||||
"UCes5DW7sk9WU8oqE9HGJdpg"
|
||||
"UCOksmJqNzaYLhplfs7Sl8cA"
|
||||
"UCQNmHyGAKqzOT_JsVEs4eag"
|
||||
"UCd8rTrNRM6DYR4TXgSqNw8g"
|
||||
];
|
||||
}
modules/nixos/homelab/orphans/glance/config/widgets/weather.nix (new executable file, 7 additions)
@@ -0,0 +1,7 @@
{
  type = "weather";
  title = "Weather";
  units = "metric";
  hour-format = "12h";
  location = "Winnipeg, Manitoba, Canada";
}
modules/nixos/homelab/orphans/glance/default.nix (new executable file, 82 additions)
@@ -0,0 +1,82 @@
{ config, flake, ... }:
|
||||
let
|
||||
inherit (flake.config.services.instances)
|
||||
glance
|
||||
jellyfin
|
||||
web
|
||||
;
|
||||
inherit (flake.config.machines.devices) ceres mars deimos;
|
||||
configHelpers = {
|
||||
service = glance;
|
||||
hostname = config.networking.hostName;
|
||||
localhost = web.localhost.address1;
|
||||
host = configHelpers.service.domains.url0;
|
||||
};
|
||||
configPath = ./config;
|
||||
configImports = {
|
||||
server = import (configPath + /server.nix) { inherit flake configHelpers; };
|
||||
branding = import (configPath + /branding.nix);
|
||||
theme = import (configPath + /theme.nix);
|
||||
pages = import (configPath + /pages.nix) { inherit config flake; };
|
||||
};
|
||||
in
|
||||
{
|
||||
services = {
|
||||
glance = {
|
||||
enable = true;
|
||||
settings = configImports;
|
||||
};
|
||||
caddy = {
|
||||
virtualHosts = {
|
||||
"${configHelpers.host}" = {
|
||||
extraConfig = ''
|
||||
@allowed_ips {
|
||||
remote_ip ${mars.wireguard.ip0} ${deimos.wireguard.ip0}
|
||||
}
|
||||
|
||||
handle @allowed_ips {
|
||||
redir /.well-known/carddav /remote.php/dav/ 301
|
||||
redir /.well-known/caldav /remote.php/dav/ 301
|
||||
reverse_proxy ${ceres.wireguard.ip0}:${toString configHelpers.service.ports.port0}
|
||||
}
|
||||
handle {
|
||||
respond "Access Denied" 403
|
||||
}
|
||||
tls ${configHelpers.service.ssl.cert} ${configHelpers.service.ssl.key}
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
sops =
|
||||
let
|
||||
sopsPath = secret: {
|
||||
path = "/run/secrets/${configHelpers.service.name}-${secret}";
|
||||
owner = "root";
|
||||
group = "root";
|
||||
mode = "644";
|
||||
};
|
||||
in
|
||||
{
|
||||
secrets = builtins.listToAttrs (
|
||||
map
|
||||
(secret: {
|
||||
name = "${configHelpers.service.name}/${secret}";
|
||||
value = sopsPath secret;
|
||||
})
|
||||
[
|
||||
# "key"
|
||||
# "${user0}-pass"
|
||||
jellyfin.name
|
||||
]
|
||||
);
|
||||
};
|
||||
|
||||
networking = {
|
||||
firewall = {
|
||||
interfaces.wg0.allowedTCPPorts = [
|
||||
configHelpers.service.ports.port0
|
||||
];
|
||||
};
|
||||
};
|
||||
}
modules/nixos/homelab/orphans/midnight/default.nix (new executable file, 173 additions)
@@ -0,0 +1,173 @@
{
|
||||
flake,
|
||||
config,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
let
|
||||
inherit (flake.config.people) user0;
|
||||
inherit (flake.config.machines) devices;
|
||||
|
||||
mars = devices.mars.name;
|
||||
ceres = devices.ceres.name;
|
||||
eris = devices.eris.name;
|
||||
deimos = devices.deimos.name;
|
||||
phobos = devices.phobos.name;
|
||||
|
||||
hostname = config.networking.hostName;
|
||||
|
||||
deviceLogic =
|
||||
var0: var1: var2: var3: var4:
|
||||
if hostname == ceres then
|
||||
var0
|
||||
else if hostname == eris then
|
||||
var1
|
||||
else if hostname == mars then
|
||||
var2
|
||||
else if hostname == deimos then
|
||||
var3
|
||||
else if hostname == phobos then
|
||||
var4
|
||||
else
|
||||
var0;
|
||||
|
||||
macOctet = deviceLogic "57" "58" "59" "60" "61";
|
||||
in
|
||||
{
|
||||
microvm = {
|
||||
vms = {
|
||||
miner = {
|
||||
autostart = true;
|
||||
config =
|
||||
let
|
||||
macAddress = "02:00:00:00:00:${macOctet}";
|
||||
workers = deviceLogic 35 4 18 5 6;
|
||||
in
|
||||
{
|
||||
environment.systemPackages = [
|
||||
pkgs.git
|
||||
pkgs.ncurses
|
||||
pkgs.python313
|
||||
];
|
||||
|
||||
microvm = {
|
||||
forwardPorts = [
|
||||
{
|
||||
from = "host";
|
||||
host.port = 2057;
|
||||
guest.port = 22;
|
||||
}
|
||||
];
|
||||
hypervisor = "qemu";
|
||||
interfaces = [
|
||||
{
|
||||
type = "tap";
|
||||
id = "vm-miner";
|
||||
mac = "02:00:00:00:57:${macOctet}";
|
||||
}
|
||||
{
|
||||
type = "user";
|
||||
id = "uservm-miner";
|
||||
mac = macAddress;
|
||||
}
|
||||
];
|
||||
mem = deviceLogic 45000 5120 22000 6144 7168;
|
||||
shares = [
|
||||
{
|
||||
mountPoint = "/nix/.ro-store";
|
||||
proto = "virtiofs";
|
||||
source = "/nix/store";
|
||||
tag = "read_only_nix_store";
|
||||
}
|
||||
{
|
||||
mountPoint = "/var/lib/midnight-data";
|
||||
proto = "virtiofs";
|
||||
source = "/var/lib/midnight-data";
|
||||
tag = "midnight_data";
|
||||
}
|
||||
];
|
||||
vcpu = workers;
|
||||
};
|
||||
|
||||
networking.firewall.allowedTCPPorts = [
|
||||
22
|
||||
];
|
||||
|
||||
services = {
|
||||
openssh = {
|
||||
enable = true;
|
||||
settings.PasswordAuthentication = false;
|
||||
};
|
||||
};
|
||||
|
||||
system.stateVersion = "25.05";
|
||||
|
||||
systemd = {
|
||||
network = {
|
||||
enable = true;
|
||||
networks."20-user" = {
|
||||
matchConfig.MACAddress = macAddress;
|
||||
networkConfig = {
|
||||
DHCP = "yes";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
tmpfiles.rules = [
|
||||
"d /var/lib/midnight-data 0755 root root - -"
|
||||
];
|
||||
|
||||
services = {
|
||||
midnight-miner = {
|
||||
after = [ "network-online.target" ];
|
||||
description = "MidnightMiner - Cardano NIGHT token miner";
|
||||
serviceConfig = {
|
||||
Environment = [
|
||||
"PATH=/run/current-system/sw/bin"
|
||||
"TERM=xterm-256color"
|
||||
];
|
||||
ExecStartPre = pkgs.writeShellScript "setup-miner" ''
|
||||
# Create venv if not already present (persists on virtiofs mount)
|
||||
if [ ! -d /var/lib/midnight-data/venv ]; then
|
||||
${pkgs.python313}/bin/python -m venv /var/lib/midnight-data/venv
|
||||
fi
|
||||
|
||||
# Install/upgrade dependencies
|
||||
/var/lib/midnight-data/venv/bin/pip install --upgrade pip
|
||||
/var/lib/midnight-data/venv/bin/pip install requests pycardano cbor2 portalocker
|
||||
|
||||
# Clone repo if not already present
|
||||
if [ ! -d /var/lib/midnight-data/MidnightMiner ]; then
|
||||
cd /var/lib/midnight-data
|
||||
${pkgs.git}/bin/git clone https://github.com/djeanql/MidnightMiner.git
|
||||
else
|
||||
cd /var/lib/midnight-data/MidnightMiner
|
||||
${pkgs.git}/bin/git pull
|
||||
fi
|
||||
'';
|
||||
ExecStart = pkgs.writeShellScript "run-miner" ''
|
||||
export PATH=/run/current-system/sw/bin:$PATH
|
||||
cd /var/lib/midnight-data/MidnightMiner
|
||||
/var/lib/midnight-data/venv/bin/python miner.py --workers ${toString workers} --no-donation
|
||||
'';
|
||||
Restart = "always";
|
||||
RestartSec = 10;
|
||||
};
|
||||
wants = [ "network-online.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
time.timeZone = "America/Winnipeg";
|
||||
|
||||
users.users.root.openssh.authorizedKeys.keys = flake.config.people.users.${user0}.sshKeys;
|
||||
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
systemd.tmpfiles.rules = [
|
||||
"d /var/lib/midnight-data 0751 microvm wheel - -"
|
||||
];
|
||||
}
modules/nixos/homelab/orphans/ollama/default.nix (new executable file, 11 additions)
@@ -0,0 +1,11 @@
let
|
||||
importList =
|
||||
let
|
||||
content = builtins.readDir ./.;
|
||||
dirContent = builtins.filter (n: content.${n} == "directory") (builtins.attrNames content);
|
||||
in
|
||||
map (name: ./. + "/${name}") dirContent;
|
||||
in
|
||||
{
|
||||
imports = importList;
|
||||
}
modules/nixos/homelab/orphans/ollama/ollamaCeres/default.nix (new executable file, 75 additions)
@@ -0,0 +1,75 @@
{
|
||||
flake,
|
||||
...
|
||||
}:
|
||||
let
|
||||
inherit (flake.config.machines.devices)
|
||||
ceres
|
||||
;
|
||||
inherit (flake.config.services.instances)
|
||||
ollama
|
||||
web
|
||||
;
|
||||
service = ollama;
|
||||
localhost = web.localhost.address0;
|
||||
host = service.domains.url0;
|
||||
in
|
||||
{
|
||||
services = {
|
||||
ollama = {
|
||||
enable = true;
|
||||
group = service.name;
|
||||
host = "http://${localhost}";
|
||||
user = service.name;
|
||||
port = service.ports.port1;
|
||||
acceleration = "cuda";
|
||||
models = service.paths.path0;
|
||||
};
|
||||
open-webui = {
|
||||
enable = true;
|
||||
host = localhost;
|
||||
port = service.ports.port0;
|
||||
environment = {
|
||||
ENABLE_OLLAMA_API = "True";
|
||||
ANONYMIZED_TELEMETRY = "False";
|
||||
DO_NOT_TRACK = "True";
|
||||
SCARF_NO_ANALYTICS = "True";
|
||||
OLLAMA_BASE_URL = "http://${localhost}:${toString service.ports.port1}";
|
||||
WEBUI_AUTH = "True";
|
||||
};
|
||||
};
|
||||
caddy = {
|
||||
virtualHosts = {
|
||||
${host} = {
|
||||
extraConfig = ''
|
||||
reverse_proxy ${localhost}:${toString service.ports.port0}
|
||||
tls ${service.ssl.cert} ${service.ssl.key}
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
systemd.tmpfiles.rules = [
|
||||
"Z ${service.paths.path0} 0755 ${service.name} ${service.name} -"
|
||||
];
|
||||
networking = {
|
||||
firewall = {
|
||||
allowedTCPPorts = [
|
||||
service.ports.port0
|
||||
service.ports.port1
|
||||
];
|
||||
};
|
||||
};
|
||||
fileSystems = {
|
||||
"/var/lib/${service.name}" = {
|
||||
device = service.paths.path0;
|
||||
fsType = "none";
|
||||
options = [
|
||||
"bind"
|
||||
];
|
||||
depends = [
|
||||
ceres.storage0.mount
|
||||
];
|
||||
};
|
||||
};
|
||||
}
modules/nixos/homelab/orphans/ollama/ollamaMars/default.nix (new executable file, 47 additions)
@@ -0,0 +1,47 @@
{
|
||||
flake,
|
||||
...
|
||||
}:
|
||||
let
|
||||
inherit (flake.config.services.instances)
|
||||
ollama
|
||||
web
|
||||
;
|
||||
service = ollama;
|
||||
localhost = web.localhost.address1;
|
||||
in
|
||||
{
|
||||
services = {
|
||||
ollama = {
|
||||
enable = true;
|
||||
group = service.name;
|
||||
host = "http://${localhost}";
|
||||
user = service.name;
|
||||
acceleration = "rocm";
|
||||
};
|
||||
open-webui = {
|
||||
enable = true;
|
||||
host = localhost;
|
||||
port = service.ports.port0;
|
||||
environment = {
|
||||
ENABLE_OLLAMA_API = "True";
|
||||
ANONYMIZED_TELEMETRY = "False";
|
||||
DO_NOT_TRACK = "True";
|
||||
SCARF_NO_ANALYTICS = "True";
|
||||
OLLAMA_BASE_URL = "http://${localhost}:${toString service.ports.port1}";
|
||||
WEBUI_AUTH = "True";
|
||||
};
|
||||
};
|
||||
};
|
||||
systemd.tmpfiles.rules = [
|
||||
"Z ${service.paths.path1} 0777 ${service.name} ${service.name} -"
|
||||
];
|
||||
networking = {
|
||||
firewall = {
|
||||
allowedTCPPorts = [
|
||||
service.ports.port0
|
||||
service.ports.port1
|
||||
];
|
||||
};
|
||||
};
|
||||
}
modules/nixos/homelab/orphans/peertube/default.nix (new executable file, 151 additions)
@@ -0,0 +1,151 @@
{
|
||||
flake,
|
||||
config,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
let
|
||||
inherit (flake.config.machines.devices)
|
||||
ceres
|
||||
;
|
||||
inherit (flake.config.services.instances)
|
||||
caddy
|
||||
peertube
|
||||
smtp
|
||||
web
|
||||
;
|
||||
service = peertube;
|
||||
localhost = web.localhost.address0;
|
||||
host = service.domains.url0;
|
||||
in
|
||||
{
|
||||
services = {
|
||||
peertube = {
|
||||
configureNginx = false;
|
||||
enable = true;
|
||||
enableWebHttps = true;
|
||||
group = service.name;
|
||||
listenWeb = caddy.ports.port1;
|
||||
listenHttp = service.ports.port0;
|
||||
localDomain = host;
|
||||
serviceEnvironmentFile = config.sops.secrets."${service.name}/root".path;
|
||||
user = service.name;
|
||||
plugins = {
|
||||
enable = true;
|
||||
plugins = builtins.attrValues {
|
||||
inherit (pkgs)
|
||||
peertube-plugin-livechat
|
||||
peertube-plugin-matomo
|
||||
peertube-plugin-transcoding-custom-quality
|
||||
peertube-theme-dark
|
||||
;
|
||||
};
|
||||
};
|
||||
|
||||
secrets = {
|
||||
secretsFile = config.sops.secrets."${service.name}/secret".path;
|
||||
};
|
||||
settings = {
|
||||
instance = {
|
||||
name = "upRootNutrition";
|
||||
};
|
||||
log = {
|
||||
level = "debug";
|
||||
};
|
||||
smtp = {
|
||||
transport = smtp.name;
|
||||
disable_starttls = false;
|
||||
from_address = service.email.address1;
|
||||
hostname = smtp.hostname;
|
||||
port = smtp.ports.port1;
|
||||
username = smtp.email.address1;
|
||||
tls = false;
|
||||
};
|
||||
};
|
||||
database = {
|
||||
createLocally = true;
|
||||
passwordFile = config.sops.secrets."${service.name}/database".path;
|
||||
};
|
||||
redis = {
|
||||
enableUnixSocket = true;
|
||||
createLocally = true;
|
||||
passwordFile = config.sops.secrets."${service.name}/redis".path;
|
||||
};
|
||||
smtp = {
|
||||
createLocally = true;
|
||||
passwordFile = config.sops.secrets."${service.name}/smtp".path;
|
||||
};
|
||||
};
|
||||
|
||||
caddy = {
|
||||
virtualHosts = {
|
||||
${host} = {
|
||||
extraConfig = ''
|
||||
reverse_proxy ${localhost}:${toString service.ports.port0}
|
||||
|
||||
tls ${service.ssl.cert} ${service.ssl.key}
|
||||
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
  sops =
    let
      sopsPath = secret: {
        path = "${service.sops.path0}/${service.name}/${secret}";
        owner = service.name;
        mode = "600";
      };
    in
    {
      secrets = builtins.listToAttrs (
        map
          (secret: {
            name = "${service.name}/${secret}";
            value = sopsPath secret;
          })
          [
            "database"
            "redis"
            "root"
            "secret"
            "smtp"
          ]
      );
    };
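
  # Bind-mounts PeerTube's state directory onto NAS-backed storage; `depends`
  # ensures the ceres storage mount is active before this bind is attempted.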
  fileSystems."/var/lib/${service.name}" = {
    device = service.paths.path0;
    fsType = "none";
    options = [
      "bind"
    ];
    depends = [
      ceres.storage0.mount
    ];
  };

  systemd.tmpfiles.rules = [
    "Z ${service.paths.path0} 755 ${service.name} ${service.name} -"
    "Z ${service.sops.path0} 755 ${service.name} ${service.name} -"
  ];

  users.users.${service.name}.extraGroups = [
    "nginx"
    "caddy"
  ];

  networking = {
    firewall = {
      allowedTCPPorts = [
        service.ports.port0
        service.ports.port1
        service.ports.port2
        service.ports.port3
        service.ports.port4
      ];
    };
  };
}
65
modules/nixos/homelab/orphans/searx/config/engines.nix
Executable file

@@ -0,0 +1,65 @@
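# mapAttrsToList folds each attribute name into its value, yielding the
# list-of-engines shape that SearXNG's `engines:` setting expects (assumed
# from how the searx module serializes settings).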
{ lib, ... }:
lib.mapAttrsToList (name: value: { inherit name; } // value) {
  "duckduckgo".disabled = false;
  "brave".disabled = false;
  "bing".disabled = false;
  "mojeek".disabled = true;
  "mwmbl".disabled = false;
  "mwmbl".weight = 0.4;
  "qwant".disabled = true;
  "crowdview".disabled = false;
  "crowdview".weight = 0.5;
  "curlie".disabled = true;
  "ddg definitions".disabled = false;
  "ddg definitions".weight = 2;
  "wikibooks".disabled = false;
  "wikidata".disabled = false;
  "wikiquote".disabled = true;
  "wikisource".disabled = true;
  "wikispecies".disabled = false;
  "wikispecies".weight = 0.5;
  "wikiversity".disabled = false;
  "wikiversity".weight = 0.5;
  "wikivoyage".disabled = false;
  "wikivoyage".weight = 0.5;
  "currency".disabled = true;
  "dictzone".disabled = true;
  "lingva".disabled = true;
  "bing images".disabled = false;
  "brave.images".disabled = false;
  "duckduckgo images".disabled = false;
  "google images".disabled = false;
  "qwant images".disabled = true;
  "1x".disabled = true;
  "artic".disabled = false;
  "deviantart".disabled = false;
  "flickr".disabled = true;
  "imgur".disabled = false;
  "library of congress".disabled = false;
  "material icons".disabled = true;
  "material icons".weight = 0.2;
  "openverse".disabled = false;
  "pinterest".disabled = true;
  "svgrepo".disabled = false;
  "unsplash".disabled = false;
  "wallhaven".disabled = false;
  "wikicommons.images".disabled = false;
  "yacy images".disabled = true;
  "bing videos".disabled = false;
  "brave.videos".disabled = true;
  "duckduckgo videos".disabled = true;
  "google videos".disabled = false;
  "qwant videos".disabled = false;
  "dailymotion".disabled = true;
  "google play movies".disabled = true;
  "invidious".disabled = true;
  "odysee".disabled = true;
  "peertube".disabled = true;
  "piped".disabled = true;
  "rumble".disabled = false;
  "sepiasearch".disabled = false;
  "vimeo".disabled = true;
  "youtube".disabled = false;
  "brave.news".disabled = true;
  "google news".disabled = true;
}
8
modules/nixos/homelab/orphans/searx/config/general.nix
Executable file

@@ -0,0 +1,8 @@
{
  debug = false;
  instance_name = "SearXNG Instance";
  donation_url = false;
  contact_url = false;
  privacypolicy_url = false;
  enable_metrics = false;
}
7
modules/nixos/homelab/orphans/searx/config/outgoing.nix
Executable file

@@ -0,0 +1,7 @@
{
  request_timeout = 5.0;
  max_request_timeout = 15.0;
  pool_connections = 100;
  pool_maxsize = 15;
  enable_http2 = true;
}
9
modules/nixos/homelab/orphans/searx/config/plugins.nix
Executable file

@@ -0,0 +1,9 @@
[
  "Basic Calculator"
  "Hash plugin"
  "Tor check plugin"
  "Open Access DOI rewrite"
  "Hostnames plugin"
  "Unit converter plugin"
  "Tracker URL remover"
]
7
modules/nixos/homelab/orphans/searx/config/search.nix
Executable file

@@ -0,0 +1,7 @@
{
  safe_search = 0;
  autocomplete_min = 2;
  autocomplete = "duckduckgo";
  ban_time_on_fail = 5;
  max_ban_time_on_fail = 120;
}
27
modules/nixos/homelab/orphans/searx/config/server.nix
Executable file

@@ -0,0 +1,27 @@
{
  flake,
  config,
  configHelpers,
  ...
}:
let
  inherit (flake.config.machines.devices) ceres;
in
{
  port = configHelpers.service.ports.port0;
  bind_address =
    if configHelpers.hostname == ceres.name then ceres.wireguard.ip0 else configHelpers.localhost;
  secret_key = config.sops.secrets."searx/key".path;
  limiter = false;
  public_instance = false;
  image_proxy = true;
  method = "GET";
}
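# Only the ceres host advertises a public base_url; on other hosts the merge
# below contributes nothing.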
// (
  if configHelpers.hostname == ceres.name then
    {
      base_url = "https://${configHelpers.host}";
    }
  else
    { }
)
11
modules/nixos/homelab/orphans/searx/config/ui.nix
Executable file

@@ -0,0 +1,11 @@
{
  static_use_hash = true;
  default_locale = "en";
  query_in_title = true;
  infinite_scroll = true;
  center_alignment = true;
  default_theme = "simple";
  theme_args.simple_style = "auto";
  search_on_category_select = true;
  hotkeys = "vim";
}
101
modules/nixos/homelab/orphans/searx/default.nix
Executable file

@@ -0,0 +1,101 @@
{
  flake,
  lib,
  config,
  ...
}:
let
  inherit (flake.config.machines.devices) ceres mars deimos;
  inherit (flake.config.services.instances) searx web;
  configHelpers = {
    service = searx;
    hostname = config.networking.hostName;
    localhost = web.localhost.address0;
    host = configHelpers.service.domains.url0;
  };
  configPath = ./config;
  configImports = {
    general = import (configPath + /general.nix);
    ui = import (configPath + /ui.nix);
    search = import (configPath + /search.nix);
    server = import (configPath + /server.nix) { inherit config flake configHelpers; };
    engines = import (configPath + /engines.nix) { inherit lib; };
    outgoing = import (configPath + /outgoing.nix);
    enabled_plugins = import (configPath + /plugins.nix);
  };
in
{
  services = {
    searx = {
      enable = true;
      redisCreateLocally = true;
      uwsgiConfig = {
        socket = "/run/searx/searx.sock";
        http = ":${builtins.toString configHelpers.service.ports.port0}";
        chmod-socket = "660";
      };
      settings = configImports;
    };
  }
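  # Only ceres runs the public Caddy vhost; requests are restricted to the
  # WireGuard addresses of mars and deimos, and everything else receives 403.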
  // (
    if configHelpers.hostname == ceres.name then
      {
        caddy = {
          virtualHosts = {
            "${configHelpers.host}" = {
              extraConfig = ''
                @allowed_ips {
                  remote_ip ${mars.wireguard.ip0} ${deimos.wireguard.ip0}
                }

                handle @allowed_ips {
                  redir /.well-known/carddav /remote.php/dav/ 301
                  redir /.well-known/caldav /remote.php/dav/ 301
                  reverse_proxy ${ceres.wireguard.ip0}:${toString configHelpers.service.ports.port0}
                }
                handle {
                  respond "Access Denied" 403
                }
                tls ${configHelpers.service.ssl.cert} ${configHelpers.service.ssl.key}
              '';
            };
          };
        };
      }
    else
      { }
  );

  users.groups.searx.members = [ "caddy" ];
  # systemd.services.caddy.serviceConfig.ProtectHome = false;

  sops =
    let
      sopsPath = secret: {
        path = "${configHelpers.service.sops.path0}/${configHelpers.service.name}-${secret}";
        owner = configHelpers.service.name;
        mode = "600";
      };
    in
    {
      secrets = builtins.listToAttrs (
        map
          (secret: {
            name = "${configHelpers.service.name}/${secret}";
            value = sopsPath secret;
          })
          [
            "key"
          ]
      );
    };

  networking = {
    firewall = {
      interfaces.wg0.allowedTCPPorts = [
        configHelpers.service.ports.port0
      ];
    };
  };
}