{ inputs, ... }:
# ============================================================================
# USDA Dashboard External System Module
# ============================================================================
# External system configuration for usda-dash.
# This module can be referenced from nixos-systems/inventory.nix using:
#
#   nix-lxc = {
#     devices = {
#       "usda-dash" = builtins.fetchGit {
#         url = "https://git.factory.uga.edu/MODEL/usda-dash-config.git";
#         rev = "commit-hash";
#         submodules = true;  # REQUIRED for usda-vision submodule
#       };
#     };
#   };
#
# IMPORTANT: For LXC containers running Docker, the Proxmox LXC must be
# configured with:
#   - Features: nesting=1, keyctl=1
#   - Unprivileged: no (or privileged: yes)
# Edit the container config in Proxmox: /etc/pve/lxc/<VMID>.conf
# Add: features: nesting=1,keyctl=1
#
# NOTE(review): this file is a *curried* function — the `{ inputs, ... }:`
# head above is followed by the module-argument head below, so the importer
# must apply it to `inputs` first before NixOS sees a module. `inputs` is
# not referenced anywhere in the visible body — confirm the importing
# framework expects this shape.

{
  config,
  lib,
  pkgs,
  ...
}:
let
|
|
# Camera SDK derivation - extracts and installs the SDK
|
|
camera-sdk = pkgs.stdenv.mkDerivation {
|
|
pname = "mindvision-camera-sdk";
|
|
version = "2.1.0.49";
|
|
|
|
src = ./usda-vision/camera-management-api/camera_sdk/linuxSDK_V2.1.0.49(250108).tar.gz;
|
|
|
|
nativeBuildInputs = [ pkgs.makeWrapper ];
|
|
buildInputs = [ pkgs.libusb1 ];
|
|
|
|
unpackPhase = ''
|
|
tar xzf $src
|
|
cd "linuxSDK_V2.1.0.49(250108)"
|
|
'';
|
|
|
|
installPhase = ''
|
|
mkdir -p $out/lib $out/include
|
|
|
|
# Copy library files
|
|
if [ -d lib ]; then
|
|
cp -r lib/* $out/lib/ || true
|
|
fi
|
|
|
|
# Copy header files
|
|
if [ -d include ]; then
|
|
cp -r include/* $out/include/ || true
|
|
fi
|
|
|
|
# Make libraries executable
|
|
chmod +x $out/lib/*.so* 2>/dev/null || true
|
|
'';
|
|
|
|
meta = {
|
|
description = "MindVision Camera SDK";
|
|
platforms = pkgs.lib.platforms.linux;
|
|
};
|
|
};
|
|
|
|
# Create a derivation that packages the usda-vision directory
|
|
usda-vision-app = pkgs.stdenv.mkDerivation {
|
|
pname = "usda-vision";
|
|
version = "1.0.0";
|
|
|
|
# Use the directory from this repository with explicit source filtering
|
|
# The ./usda-vision path is relative to this module file
|
|
src = lib.cleanSourceWith {
|
|
src = ./usda-vision;
|
|
filter = path: type:
|
|
let
|
|
baseName = baseNameOf path;
|
|
in
|
|
# Exclude git, but include everything else
|
|
baseName != ".git" &&
|
|
baseName != ".cursor" &&
|
|
baseName != "__pycache__" &&
|
|
baseName != "node_modules" &&
|
|
baseName != ".venv";
|
|
};
|
|
|
|
nativeBuildInputs = [ pkgs.makeWrapper pkgs.rsync ];
|
|
|
|
# Don't run these phases, we'll do everything in installPhase
|
|
dontBuild = true;
|
|
dontConfigure = true;
|
|
|
|
installPhase = ''
|
|
mkdir -p $out/opt/usda-vision
|
|
|
|
# Debug: show what's in source
|
|
echo "Source directory contents:"
|
|
ls -la $src/ || true
|
|
|
|
# Process docker-compose.yml - replace paths, hostnames, and configure SDK from Nix
|
|
if [ -f $src/docker-compose.yml ]; then
|
|
# Use Python for multi-line block removal
|
|
${pkgs.python3}/bin/python3 <<PYTHON_SCRIPT > $TMPDIR/docker-compose.yml
|
|
import re
|
|
|
|
with open('$src/docker-compose.yml', 'r') as f:
|
|
content = f.read()
|
|
|
|
# Replace paths and hostnames
|
|
replacements = [
|
|
(r'env_file:.*management-dashboard-web-app/\\.env', 'env_file: /var/lib/usda-vision/.env'),
|
|
(r'\\./management-dashboard-web-app/\\.env', '/var/lib/usda-vision/.env'),
|
|
(r'\\./management-dashboard-web-app', '/var/lib/usda-vision/management-dashboard-web-app'),
|
|
(r'\\./media-api', '/var/lib/usda-vision/media-api'),
|
|
(r'\\./video-remote', '/var/lib/usda-vision/video-remote'),
|
|
(r'\\./scheduling-remote', '/var/lib/usda-vision/scheduling-remote'),
|
|
(r'\\./vision-system-remote', '/var/lib/usda-vision/vision-system-remote'),
|
|
(r'\\./camera-management-api', '/var/lib/usda-vision/camera-management-api'),
|
|
('exp-dash', '192.168.1.156'),
|
|
('localhost', '192.168.1.156'),
|
|
('LD_LIBRARY_PATH=/usr/local/lib:/lib:/usr/lib', 'LD_LIBRARY_PATH=/lib/camera-sdk:/usr/local/lib:/lib:/usr/lib'),
|
|
]
|
|
|
|
for pattern, replacement in replacements:
|
|
content = re.sub(pattern, replacement, content)
|
|
|
|
# Add SDK volume mount after timezone mount
|
|
content = re.sub(
|
|
r'( - /etc/timezone:/etc/timezone:ro)',
|
|
r'\\1\\n - \${camera-sdk}/lib:/lib/camera-sdk:ro',
|
|
content
|
|
)
|
|
|
|
# Remove SDK installation blocks (first block)
|
|
content = re.sub(
|
|
r' # Only install system packages if not already installed.*?else\\n.*?echo.*?System dependencies already installed.*?fi\\n',
|
|
'',
|
|
content,
|
|
flags=re.DOTALL
|
|
)
|
|
|
|
# Remove SDK installation blocks (second block)
|
|
content = re.sub(
|
|
r' # Install camera SDK if not already installed.*?fi;\\n',
|
|
'',
|
|
content,
|
|
flags=re.DOTALL
|
|
)
|
|
|
|
print(content, end='')
|
|
PYTHON_SCRIPT
|
|
fi
|
|
|
|
# Copy all application files using rsync with chmod, excluding files we'll provide separately
|
|
${pkgs.rsync}/bin/rsync -av --chmod=Du+w --exclude='.git' --exclude='docker-compose.yml' --exclude='.env' --exclude='management-dashboard-web-app/.env' $src/ $out/opt/usda-vision/
|
|
|
|
# Copy the processed docker-compose.yml
|
|
if [ -f $TMPDIR/docker-compose.yml ]; then
|
|
cp $TMPDIR/docker-compose.yml $out/opt/usda-vision/docker-compose.yml
|
|
fi
|
|
|
|
# Verify files were copied
|
|
echo "Destination directory contents:"
|
|
ls -la $out/opt/usda-vision/ || true
|
|
|
|
# Create convenience scripts
|
|
mkdir -p $out/bin
|
|
|
|
cat > $out/bin/usda-vision-start <<'EOF'
|
|
#!/usr/bin/env bash
|
|
cd $out/opt/usda-vision
|
|
${pkgs.docker-compose}/bin/docker-compose up -d --build
|
|
EOF
|
|
|
|
cat > $out/bin/usda-vision-stop <<'EOF'
|
|
#!/usr/bin/env bash
|
|
cd $out/opt/usda-vision
|
|
${pkgs.docker-compose}/bin/docker-compose down
|
|
EOF
|
|
|
|
cat > $out/bin/usda-vision-logs <<'EOF'
|
|
#!/usr%bin/env bash
|
|
cd $out/opt/usda-vision
|
|
${pkgs.docker-compose}/bin/docker-compose logs -f "$@"
|
|
EOF
|
|
|
|
cat > $out/bin/usda-vision-restart <<'EOF'
|
|
#!/usr/bin/env bash
|
|
cd $out/opt/usda-vision
|
|
${pkgs.docker-compose}/bin/docker-compose restart "$@"
|
|
EOF
|
|
|
|
chmod +x $out/bin/usda-vision-*
|
|
'';
|
|
|
|
meta = {
|
|
description = "USDA Vision camera management system";
|
|
maintainers = [ "UGA Innovation Factory" ];
|
|
};
|
|
};
|
|
in
|
|
|
|
{
|
|
# ========== Module Configuration ==========
|
|
config = {
|
|
    # Nix daemon configuration for an LXC container without sandbox support.
    nix.settings = {
      sandbox = false; # LXC containers don't support kernel namespaces for sandboxing
      experimental-features = [ "nix-command" "flakes" ];
    };
|
    # System packages specific to usda-dash
    environment.systemPackages = with pkgs; [
      # Core tools
      git
      vim
      htop
      curl
      wget

      # Docker and Docker Compose for running usda-vision
      docker
      docker-compose

      # Supabase
      supabase-cli

      # Camera SDK — resolves to the let-binding above (lexical bindings
      # take precedence over `with pkgs`)
      camera-sdk

      # USDA Vision application package with convenience scripts
      usda-vision-app
    ];
|
    # Make camera SDK libraries available system-wide.
    # NOTE(review): setting LD_LIBRARY_PATH globally affects *every*
    # process on the system and can break unrelated dynamically linked
    # binaries — confirm this is intentional rather than scoping the
    # variable to the services that actually need the SDK.
    environment.variables = {
      LD_LIBRARY_PATH = "${camera-sdk}/lib";
    };
|
    # Enable Docker service with LXC-compatible settings
    virtualisation.docker = {
      enable = true;
      # Periodically garbage-collect unused images/containers
      autoPrune.enable = true;
      # Enable experimental daemon features for better LXC compatibility
      daemon.settings = {
        experimental = true;
      };
    };
|
|
    # LXC-specific kernel settings for nested containers
    boot.kernel.sysctl = {
      # Required for Docker bridge networking inside the LXC guest
      "net.ipv4.ip_forward" = 1;
      "net.ipv4.conf.all.forwarding" = 1;
    };
|
|
    # Configure users (athenix.users.* is a project-local option set; its
    # definition is not visible in this file)
    athenix.users.sv22900.enable = true;

    # Add users to the docker group so they can talk to the Docker socket.
    # NOTE: docker-group membership is effectively root-equivalent.
    users.users.sv22900.extraGroups = [ "docker" ];
    users.users.engr-ugaif.extraGroups = [ "docker" ];
|
|
    # Create persistent directories and the .env file location at boot.
    # 'f' creates /var/lib/usda-vision/.env *empty* if missing; it is
    # seeded with defaults by the usda-vision service's preStart.
    systemd.tmpfiles.rules = [
      "d /var/lib/usda-vision 0755 root root -"
      "f /var/lib/usda-vision/.env 0644 root root -"
      "d /var/lib/supabase 0755 root root -"
    ];
|
|
# Supabase CLI configuration - runs in writable directory
|
|
systemd.services.supabase-cli = {
|
|
enable = true;
|
|
description = "Supabase CLI Service";
|
|
|
|
preStart = ''
|
|
# Clean slate - remove old content but keep the directory
|
|
rm -rf /var/lib/supabase/*
|
|
rm -rf /var/lib/supabase/.* 2>/dev/null || true
|
|
|
|
# Copy supabase directory structure from the app
|
|
if [ -d ${usda-vision-app}/opt/usda-vision/supabase ]; then
|
|
${pkgs.rsync}/bin/rsync -av ${usda-vision-app}/opt/usda-vision/supabase/ /var/lib/supabase/supabase/
|
|
fi
|
|
|
|
# Create necessary directories for supabase
|
|
mkdir -p /var/lib/supabase/supabase/.branches
|
|
chmod -R 755 /var/lib/supabase
|
|
'';
|
|
|
|
serviceConfig = {
|
|
WorkingDirectory = "/var/lib/supabase";
|
|
ExecStart = "${pkgs.supabase-cli}/bin/supabase start";
|
|
Type = "oneshot";
|
|
RemainAfterExit = true;
|
|
User = "root";
|
|
Group = "root";
|
|
};
|
|
};
|
|
|
|
    # Systemd service to manage the usda-vision docker-compose stack
    systemd.services.usda-vision = {
      description = "USDA Vision Docker Compose Stack";
      after = [ "docker.service" "network-online.target" "systemd-tmpfiles-setup.service" ];
      wants = [ "network-online.target" ];
      wantedBy = [ "multi-user.target" ];

      # Gate the unit on the .env file.  Note: ConditionPathExists only
      # checks *existence* — tmpfiles creates the file empty, and an empty
      # file is then populated from .env.example in preStart below.
      unitConfig = {
        ConditionPathExists = "/var/lib/usda-vision/.env";
        # The file must also live on a writable filesystem
        ConditionPathIsReadWrite = "/var/lib/usda-vision/.env";
      };

      # Mirror the read-only store copy of the app into the writable
      # runtime directory (--delete keeps it exact; excludes preserve
      # runtime-only state such as .env), then seed .env if empty.
      preStart = ''
        # Copy application code to writable directory if not already present or if source is newer
        echo "Syncing application code to /var/lib/usda-vision..."
        ${pkgs.rsync}/bin/rsync -av --delete \
          --exclude='node_modules' \
          --exclude='.env' \
          --exclude='__pycache__' \
          --exclude='.venv' \
          ${usda-vision-app}/opt/usda-vision/ /var/lib/usda-vision/

        # Ensure .env file exists with defaults if empty
        if [ ! -s /var/lib/usda-vision/.env ]; then
          if [ -f ${usda-vision-app}/opt/usda-vision/.env.example ]; then
            echo "Copying .env.example to /var/lib/usda-vision/.env"
            cp ${usda-vision-app}/opt/usda-vision/.env.example /var/lib/usda-vision/.env
            echo "Please edit /var/lib/usda-vision/.env with your configuration"
          fi
        fi
      '';

      serviceConfig = {
        Type = "oneshot";
        RemainAfterExit = true;
        WorkingDirectory = "/var/lib/usda-vision";
        User = "root";
        Group = "root";

        # Start: build images and start containers from the writable copy
        ExecStart = "${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml up -d --build";

        # Stop: gracefully stop containers
        ExecStop = "${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml down";

        # Reload: full down/up cycle (rebuilds images)
        ExecReload = "${pkgs.bash}/bin/bash -c '${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml down && ${pkgs.docker-compose}/bin/docker-compose -f /var/lib/usda-vision/docker-compose.yml up -d --build'";

        # Generous timeouts: `up --build` may compile images on first start
        TimeoutStartSec = 300;
        TimeoutStopSec = 120;
      };
    };
|
|
    # Firewall configuration - open ports for USDA Vision services
    networking.firewall = {
      enable = true;
      allowedTCPPorts = [
        # Web services
        80 # HTTP
        443 # HTTPS
        3000 # Main web app (if exposed directly)

        # Supabase services
        54321 # Supabase Kong (API Gateway)
        54322 # Supabase PostgreSQL
        54323 # Supabase Studio
        54324 # Supabase Inbucket (email testing)
        54327 # Supabase Analytics

        # USDA Vision services
        8090 # Media API
        8189 # MediaMTX API
        8554 # RTSP (MediaMTX)
        8889 # MediaMTX WebRTC
      ];
    };

    # Any other usda-dash specific configuration
|
};
|
|
}
|