Compare commits

..

1 commit

Author: Ingolf Wagner
SHA1: f64a23810b
Message: 🚧 bugwarrior with taskwarrior3
Date: 2024-09-22 12:34:45 +09:00
145 changed files with 3236 additions and 1486 deletions

View file

@@ -48,8 +48,8 @@ jobs:
 rm .ssh_key
 - name: nix build orbi
 run: nix build .#nixosConfigurations.orbi.config.system.build.toplevel
-# - name: nix build cream
-# run: nix build .#nixosConfigurations.cream.config.system.build.toplevel
+- name: nix build cream
+run: nix build .#nixosConfigurations.cream.config.system.build.toplevel
 - name: nix build cherry
 run: nix build .#nixosConfigurations.cherry.config.system.build.toplevel
 - name: nix build chungus

View file

@@ -5,6 +5,7 @@
 ./gui
 ./mainUser.nix
 ./media
+./monitor
 ./network
 ./nixos
 ./terminal

View file

@@ -25,6 +25,7 @@ with lib;
 environment.systemPackages = with pkgs; [
 alsa-utils
+alsaUtils
 # PulseAudio control
 # ------------------

View file

@@ -8,13 +8,11 @@ in
 programs.chromium.extensions = [
 "nngceckbapebfimnlniiiahkandclblb" # bitwarden
-# "edibdbjcniadpccecjdfdjjppcpchdlm" # I still don't care about cookies
+"edibdbjcniadpccecjdfdjjppcpchdlm" # I still don't care about cookies
 "gcbommkclmclpchllfjekcdonpmejbdp" # https everywhere
 "cjpalhdlnbpafiamejdnhcphjbkeiagm" # ublock origin
 "dbepggeogbaibhgnhhndojpepiihcmeb" # vimium
 "jinjaccalgkegednnccohejagnlnfdag" # Violentmonkey
-"dpplabbmogkhghncfbfdeeokoefdjegm" # Proxy SwitchySharp
-"mooikfkahbdckldjjndioackbalphokd" # Selenium IDE
 ];
 # overwrite use zram on small RAM systems

View file

@@ -1,20 +1,17 @@
-{
-lib,
-config,
-inputs,
-...
-}:
+{ lib, config, ... }:
 with lib;
 with types;
 {
 imports = [
+./default.nix
 ../timezone.nix
 ];
 config = {
-telemetry.enable = mkDefault true;
-telemetry.metrics.enable = mkDefault false;
-telemetry.opentelemetry.enable = false;
+components.monitor.enable = mkDefault true;
+components.monitor.metrics.enable = mkDefault false;
+components.monitor.opentelemetry.enable = false;
 services.journald.extraConfig = "SystemMaxUse=1G";
 };

View file

@ -0,0 +1,32 @@
{ lib, config, ... }:
with lib;
with types;
{
options.components.monitor = {
enable = mkOption {
type = bool;
default = true;
};
metrics.enable = mkOption {
type = bool;
default = config.components.monitor.enable;
};
logs.enable = mkOption {
type = bool;
default = config.components.monitor.enable;
};
};
imports = [
./logs-promtail.nix
./metrics-export-zfs.nix
./metrics-netdata.nix
./metrics-prometheus.nix
./metrics-telegraf.nix
./opentelemetry.nix
];
config = mkIf config.components.monitor.enable { };
}
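The options above cascade: `metrics.enable` and `logs.enable` both default to the component's top-level `enable` switch. A minimal usage sketch (hypothetical host configuration, assuming this module is imported):

{
  components.monitor.enable = true; # master switch
  components.monitor.metrics.enable = false; # keep logs, drop metrics on this machine
}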

View file

@ -0,0 +1,185 @@
{ config, lib, ... }:
with lib;
with types;
let
cfg = config.components.monitor.promtail;
in
{
options.components.monitor.promtail = {
enable = mkOption {
type = lib.types.bool;
default = config.components.monitor.logs.enable;
};
port = mkOption {
type = int;
default = 3500;
description = "port to provide promtail export";
};
};
config = mkMerge [
(mkIf config.components.monitor.opentelemetry.enable {
services.opentelemetry-collector.settings = {
receivers.loki = {
protocols.http.endpoint = "127.0.0.1:${toString cfg.port}";
use_incoming_timestamp = true;
};
service.pipelines.logs.receivers = [ "loki" ];
};
})
(mkIf config.components.monitor.promtail.enable {
services.promtail = {
enable = true;
configuration = {
server.disable = true;
positions.filename = "/var/cache/promtail/positions.yaml";
clients = [
{ url = "http://127.0.0.1:${toString cfg.port}/loki/api/v1/push"; }
];
scrape_configs =
let
_replace = index: replacement: ''{{ Replace .Value "${toString index}" "${replacement}" 1 }}'';
_elseif = index: ''{{ else if eq .Value "${toString index}" }}'';
_if = index: ''{{ if eq .Value "${toString index}" }}'';
_end = ''{{ end }}'';
elseblock = index: replacement: "${_elseif index}${_replace index replacement}";
ifblock = index: replacement: "${_if index}${_replace index replacement}";
createTemplateLine =
list:
"${
concatStrings (
imap0 (
index: replacement: if index == 0 then ifblock index replacement else elseblock index replacement
) list
)
}${_end}";
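# Illustration (not part of the original file): createTemplateLine [ "emerg" "alert" ]
# renders the Go template string
#   {{ if eq .Value "0" }}{{ Replace .Value "0" "emerg" 1 }}{{ else if eq .Value "1" }}{{ Replace .Value "1" "alert" 1 }}{{ end }}
# i.e. index 0 opens the if-chain, every later index becomes an else-if branch, and a single end closes it.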
in
[
{
job_name = "journal";
journal = {
json = true;
max_age = "12h";
labels.job = "systemd-journal";
};
pipeline_stages = [
{
# Set of key/value pairs of JMESPath expressions. The key will be
# the key in the extracted data while the expression will be the value,
# evaluated as a JMESPath from the source data.
json.expressions = {
# journalctl -o json | jq and you'll see these
boot_id = "_BOOT_ID";
facility = "SYSLOG_FACILITY";
facility_label = "SYSLOG_FACILITY";
instance = "_HOSTNAME";
msg = "MESSAGE";
priority = "PRIORITY";
priority_label = "PRIORITY";
transport = "_TRANSPORT";
unit = "_SYSTEMD_UNIT";
# coredump
#coredump_cgroup = "COREDUMP_CGROUP";
#coredump_exe = "COREDUMP_EXE";
#coredump_cmdline = "COREDUMP_CMDLINE";
#coredump_uid = "COREDUMP_UID";
#coredump_gid = "COREDUMP_GID";
};
}
{
# Set the unit (defaulting to the transport like audit and kernel)
template = {
source = "unit";
template = "{{if .unit}}{{.unit}}{{else}}{{.transport}}{{end}}";
};
}
{
# Normalize session IDs (session-1234.scope -> session.scope) to limit number of label values
replace = {
source = "unit";
expression = "^(session-\\d+.scope)$";
replace = "session.scope";
};
}
{
# Map priority to human readable
template = {
source = "priority_label";
#template = ''{{ if eq .Value "0" }}{{ Replace .Value "0" "emerg" 1 }}{{ else if eq .Value "1" }}{{ Replace .Value "1" "alert" 1 }}{{ else if eq .Value "2" }}{{ Replace .Value "2" "crit" 1 }}{{ else if eq .Value "3" }}{{ Replace .Value "3" "err" 1 }}{{ else if eq .Value "4" }}{{ Replace .Value "4" "warning" 1 }}{{ else if eq .Value "5" }}{{ Replace .Value "5" "notice" 1 }}{{ else if eq .Value "6" }}{{ Replace .Value "6" "info" 1 }}{{ else if eq .Value "7" }}{{ Replace .Value "7" "debug" 1 }}{{ end }}'';
template = createTemplateLine [
"emergency"
"alert"
"critical"
"error"
"warning"
"notice"
"info"
"debug"
];
};
}
{
# Map facility to human readable
template = {
source = "facility_label";
template = createTemplateLine [
"kern" # Kernel messages
"user" # User-level messages
"mail" # Mail system Archaic POSIX still supported and sometimes used (for more mail(1))
"daemon" # System daemons All daemons, including systemd and its subsystems
"auth" # Security/authorization messages Also watch for different facility 10
"syslog" # Messages generated internally by syslogd For syslogd implementations (not used by systemd, see facility 3)
"lpr" # Line printer subsystem (archaic subsystem)
"news" # Network news subsystem (archaic subsystem)
"uucp" # UUCP subsystem (archaic subsystem)
"clock" # Clock daemon systemd-timesyncd
"authpriv" # Security/authorization messages Also watch for different facility 4
"ftp" # FTP daemon
"-" # NTP subsystem
"-" # Log audit
"-" # Log alert
"cron" # Scheduling daemon
"local0" # Local use 0 (local0)
"local1" # Local use 1 (local1)
"local2" # Local use 2 (local2)
"local3" # Local use 3 (local3)
"local4" # Local use 4 (local4)
"local5" # Local use 5 (local5)
"local6" # Local use 6 (local6)
"local7" # Local use 7 (local7)
];
};
}
{
# Key is REQUIRED and the name for the label that will be created.
# Value is optional and will be the name from extracted data whose value
# will be used for the value of the label. If empty, the value will be
# inferred to be the same as the key.
labels = {
boot_id = "";
facility = "";
facility_label = "";
instance = "";
priority = "";
priority_label = "";
transport = "";
unit = "";
};
}
{
# Write the proper message instead of JSON
output.source = "msg";
}
];
}
];
};
};
})
];
}

View file

@ -0,0 +1,39 @@
{
pkgs,
config,
lib,
...
}:
with lib;
with types;
{
options.components.monitor.exporters.zfs.enable = mkOption {
type = lib.types.bool;
default = config.components.monitor.metrics.enable;
};
config = mkMerge [
(mkIf config.components.monitor.exporters.zfs.enable {
services.telegraf.extraConfig.inputs.zfs = { };
services.prometheus.exporters.zfs.enable = true;
services.opentelemetry-collector.settings = {
receivers.prometheus.config.scrape_configs = [
{
job_name = "zfs";
scrape_interval = "10s";
static_configs = [
{
targets = [ "127.0.0.1:${toString config.services.prometheus.exporters.zfs.port}" ];
}
];
}
];
service.pipelines.metrics.receivers = [ "prometheus" ];
};
})
];
}

View file

@ -0,0 +1,40 @@
{
lib,
pkgs,
config,
...
}:
with lib;
with types;
{
options.components.monitor.netdata = {
enable = mkOption {
type = bool;
default = config.components.monitor.metrics.enable;
};
};
config = mkIf config.components.monitor.netdata.enable {
# netdata sink
services.opentelemetry-collector.settings.receivers.prometheus.config.scrape_configs = [
{
job_name = "netdata";
scrape_interval = "10s";
metrics_path = "/api/v1/allmetrics";
params.format = [ "prometheus" ];
static_configs = [ { targets = [ "127.0.0.1:19999" ]; } ];
}
];
# https://docs.netdata.cloud/daemon/config/
services.netdata = {
enable = lib.mkDefault true;
config = {
global = {
"memory mode" = "ram";
};
};
};
};
}

View file

@ -0,0 +1,45 @@
{ config, lib, ... }:
with lib;
with types;
let
cfg = config.components.monitor.prometheus;
in
{
options.components.monitor.prometheus = {
enable = mkOption {
type = lib.types.bool;
default = config.components.monitor.metrics.enable;
};
port = mkOption {
type = int;
default = 8090;
description = "port to provide Prometheus export";
};
};
config = mkMerge [
(mkIf config.components.monitor.prometheus.enable {
services.prometheus = {
checkConfig = "syntax-only";
enable = true;
};
})
(mkIf config.components.monitor.prometheus.enable {
services.opentelemetry-collector.settings = {
exporters.prometheus.endpoint = "127.0.0.1:${toString cfg.port}";
service.pipelines.metrics.exporters = [ "prometheus" ];
};
services.prometheus.scrapeConfigs = [
{
job_name = "opentelemetry";
metrics_path = "/metrics";
scrape_interval = "10s";
static_configs = [ { targets = [ "localhost:${toString cfg.port}" ]; } ];
}
];
})
];
}
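Because the collector exports on `cfg.port` and the Prometheus scrape job targets the same value, overriding one option keeps both ends in sync. A sketch (the port number is arbitrary):

{
  components.monitor.prometheus.port = 9464;
}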

View file

@ -0,0 +1,57 @@
{
config,
pkgs,
lib,
...
}:
with lib;
with types;
let
cfg = config.components.monitor.telegraf;
in
{
options.components.monitor.telegraf = {
enable = mkOption {
type = lib.types.bool;
default = config.components.monitor.metrics.enable;
};
influxDBPort = mkOption {
type = int;
default = 8088;
description = "Port to listen on influxDB input";
};
};
config = lib.mkMerge [
(mkIf config.components.monitor.telegraf.enable {
# opentelemetry wiring
services.opentelemetry-collector.settings = {
receivers.influxdb.endpoint = "127.0.0.1:${toString cfg.influxDBPort}";
service.pipelines.metrics.receivers = [ "influxdb" ];
};
services.telegraf.extraConfig.outputs.influxdb_v2.urls = [
"http://127.0.0.1:${toString cfg.influxDBPort}"
];
})
(mkIf config.components.monitor.telegraf.enable {
systemd.services.telegraf.path = [ pkgs.inetutils ];
services.telegraf = {
enable = true;
extraConfig = {
# https://github.com/influxdata/telegraf/tree/master/plugins/inputs < all them plugins
inputs = {
cpu = { };
diskio = { };
processes = { };
system = { };
systemd_units = { };
ping = [ { urls = [ "10.100.0.1" ]; } ]; # actually important to make machine visible over wireguard
};
};
};
})
];
}
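A small usage sketch for the module above (assuming it is imported; `inputs.net` is one of Telegraf's stock input plugins, added here only as an illustration):

{
  components.monitor.telegraf.enable = true;
  # additionally collect network interface counters
  services.telegraf.extraConfig.inputs.net = { };
}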

View file

@ -0,0 +1,218 @@
{
pkgs,
config,
lib,
...
}:
with lib;
with types;
let
cfg = config.components.monitor.opentelemetry;
in
{
options.components.monitor.opentelemetry = {
enable = mkOption {
type = bool;
default = config.components.monitor.enable;
description = "weather or not to use opentelemetry";
};
receiver.endpoint = mkOption {
type = nullOr str;
default = null;
description = "endpoint to receive the opentelementry data from other collectors";
};
exporter.endpoint = mkOption {
type = nullOr str;
default = null;
description = "endpoint to ship opentelementry data too";
};
exporter.debug = mkOption {
type = nullOr (enum [
"logs"
"metrics"
]);
default = null;
description = "enable debug exporter.";
};
metrics.endpoint = mkOption {
type = str;
default = "127.0.0.1:8100";
description = "endpoint on where to provide opentelementry metrics";
};
};
config = mkMerge [
(mkIf config.components.monitor.opentelemetry.enable {
services.opentelemetry-collector = {
enable = true;
package = pkgs.opentelemetry-collector-contrib;
};
})
# add default tags to metrics
# todo : make sure we filter out metrics from otlp receivers
(mkIf config.components.monitor.enable {
services.opentelemetry-collector.settings = {
processors = {
# https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/resourcedetectionprocessor/README.md
"resourcedetection/system" = {
detectors = [ "system" ];
override = false;
system.hostname_sources = [ "os" ];
};
metricstransform.transforms = [
{
include = ".*";
match_type = "regexp";
action = "update";
operations = [
{
action = "add_label";
new_label = "machine";
new_value = config.networking.hostName;
}
];
}
];
};
};
})
(mkIf config.components.monitor.metrics.enable {
services.opentelemetry-collector.settings = {
service.pipelines.metrics.processors = [
"metricstransform"
"resourcedetection/system"
];
};
})
(mkIf config.components.monitor.logs.enable {
services.opentelemetry-collector.settings = {
service.pipelines.logs.processors = [ "resourcedetection/system" ];
};
})
(mkIf (config.components.monitor.opentelemetry.exporter.debug != null) {
services.opentelemetry-collector.settings = {
exporters.debug = {
verbosity = "detailed";
sampling_initial = 5;
sampling_thereafter = 200;
};
service.pipelines.${config.components.monitor.opentelemetry.exporter.debug} = {
exporters = [ "debug" ];
};
};
})
# ship to next instance
(mkIf (config.components.monitor.opentelemetry.exporter.endpoint != null) {
services.opentelemetry-collector.settings = {
exporters.otlp = {
endpoint = cfg.exporter.endpoint;
tls.insecure = true;
};
};
})
(mkIf
(
config.components.monitor.opentelemetry.exporter.endpoint != null
&& config.components.monitor.logs.enable
)
{
services.opentelemetry-collector.settings = {
service.pipelines.logs.exporters = [ "otlp" ];
};
}
)
(mkIf
(
config.components.monitor.opentelemetry.exporter.endpoint != null
&& config.components.monitor.metrics.enable
)
{
services.opentelemetry-collector.settings = {
service.pipelines.metrics.exporters = [ "otlp" ];
};
}
)
# ship from other instance
(mkIf (config.components.monitor.opentelemetry.receiver.endpoint != null) {
services.opentelemetry-collector.settings = {
receivers.otlp.protocols.grpc.endpoint = cfg.receiver.endpoint;
};
})
(mkIf
(
config.components.monitor.opentelemetry.receiver.endpoint != null
&& config.components.monitor.logs.enable
)
{
services.opentelemetry-collector.settings = {
service.pipelines.logs.receivers = [ "otlp" ];
};
}
)
(mkIf
(
config.components.monitor.opentelemetry.receiver.endpoint != null
&& config.components.monitor.metrics.enable
)
{
services.opentelemetry-collector.settings = {
service.pipelines.metrics.receivers = [ "otlp" ];
};
}
)
# scrape the opentelemetry collector's own metrics
# todo: this should be collected another way (opentelemetry internal?)
# todo : enable me only when metrics.endpoint is set.
(mkIf config.components.monitor.metrics.enable {
services.opentelemetry-collector.settings = {
receivers = {
prometheus.config.scrape_configs = [
{
job_name = "otelcol";
scrape_interval = "10s";
static_configs = [
{
targets = [ cfg.metrics.endpoint ];
}
];
metric_relabel_configs = [
{
source_labels = [ "__name__" ];
regex = ".*grpc_io.*";
action = "drop";
}
];
}
];
};
service = {
pipelines.metrics = {
receivers = [ "prometheus" ];
};
# todo : this should be automatically be collected
# open telemetries own metrics?
telemetry.metrics.address = cfg.metrics.endpoint;
};
};
})
(mkIf (!config.components.monitor.metrics.enable) {
services.opentelemetry-collector.settings = {
service.telemetry.metrics.level = "none";
};
})
];
}
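The receiver/exporter pair is meant to compose across machines: leaf nodes ship OTLP to one aggregating collector. A sketch with made-up addresses (4317 is the conventional OTLP gRPC port):

# on a laptop: forward logs and metrics to the central collector
{ components.monitor.opentelemetry.exporter.endpoint = "10.23.42.28:4317"; }

# on the aggregating server: accept OTLP from the laptops
{ components.monitor.opentelemetry.receiver.endpoint = "0.0.0.0:4317"; }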

View file

@@ -1,11 +1,5 @@
-{ clanLib, ... }:
 {
 networking.extraHosts = ''
 95.216.66.212 orbi.public
-23.35.228.101 store.steampowered.com
-92.122.104.90 steamcommunity.com
 '';
-services.openssh.knownHosts = {
-"orbi.public".publicKey = clanLib.readFact "ssh.id_ed25519.pub" "orbi";
-};
 }

View file

@@ -175,7 +175,7 @@ with lib;
 ];
 })
 (entry { machine = "cherry"; })
-#(entry { machine = "cream"; })
+(entry { machine = "cream"; })
 (entry { machine = "mobi"; })
 (entry { machine = "bobi"; })
 {

View file

@@ -8,6 +8,7 @@
 with lib;
 with types;
 let
+defaultRootKeyFiles = [ "${assets}/mrvandalo_rsa.pub" ];
 cfg = config.components.network.sshd;
 # maybe ascii-image-converter is also nice here
@@ -29,6 +30,11 @@ in
 type = bool;
 default = true;
 };
+rootKeyFiles = mkOption {
+type = with types; listOf path;
+default = [ ];
+description = "keys to root login";
+};
 onlyTincAccess = mkOption {
 type = bool;
 default = false;
@@ -58,13 +64,14 @@ in
 # settings.LoginGraceTime = 0;
 };
+users.users.root.openssh.authorizedKeys.keyFiles = cfg.rootKeyFiles ++ defaultRootKeyFiles;
 # todo: enable again when it's possible to set the `-q` ssh option in clan
 #services.openssh.banner = builtins.readFile sshBanner;
 })
 (mkIf (cfg.onlyTincAccess && cfg.enable) {
+# fixme: this is not working
 networking.firewall.extraCommands = ''
 iptables --table nat --append PREROUTING ! --in-interface tinc.+ --protocol tcp --match tcp --dport 22 --jump REDIRECT --to-ports 0
 '';
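With the new `rootKeyFiles` option a machine can grant additional root logins while the default key stays appended. A sketch, assuming a key file next to the configuration:

{
  components.network.sshd.rootKeyFiles = [ ./keys/extra-admin.pub ];
}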

View file

@@ -15,18 +15,11 @@ in
 services.openssh.knownHosts = {
 orbi = {
 hostNames = [
-"git.ingolf-wagner.de"
 "95.216.66.212"
 ];
 publicKey = publicKey "orbi";
 };
-forgejo = {
-hostNames = [
-"[git.ingolf-wagner.de]:2222"
-];
-publicKey = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDQVomwXedtY4ZAKA1Bcsz6Ud2Ys3mjjGJXLcWQLceXKmmAQfOWqqLrTddhrLjmZImS3HTexGK7iNmYYCd/lG+7j1rMebmk3cddH6qr/4WB5SSexbLV4/vlk18kia6O+52ybrMzejwcfr9qNEg+bkrmc4btsWcT/w21vRyOzsmRRnk+S54prLGtiCypFqwGYnyr6HPXO3lqCLHIR/XurcFvh/aM/RGusWh889TXA39FezDcV6OZisZTTC4BBoUAS8r6XJnrIahZJmfEHp/FbKJV0pShCCQXtpkUBu7g6B2T+8u91fY4kc8O293XhaxjTBfkZH2lGodppGG12vAQSeznppbzlT82uTx80jyt7Hw/IAjn9j8D+iKC7fA9qawVz+p1UtqHQd43+8gOSiHILOUMhVxp7ZrfhYmaiOKrkR4+Juc9P2UsqrAvxS1WaYT4KaEZfze7/DCdK0h2SSY2jgCU9sNeJG6M4s/pPj+iI/O+AagHfBZ+bF2y+OKEZOW4J+OpqnEY3lANdxsMsD9PwBpAVAn9FAJzAy+gfXINu0wqGhtA3qWM0QVjcSXsfQJQ2XrbMPd9+pvOl1Ej5SSbjXYcPt9dXWl+L69dbU5qb8WRGl2ZxloUaRcOrOkmhybwI/61LfaGzLslnNn8ARjJgzu9xSipT5D4cZ1FgB9ezqC73Q==";
-};
 };
 };

View file

@@ -43,7 +43,6 @@ with lib;
 in
 clanMachines
 // (device "iPhone" "RPQBSRB-DYEUUWQ-EAPMBA2-PL4MJ73-Y4F4ZTH-TAD7DUE-GEK56BG-HYW6YAF")
-// (device "iPad" "NEGOJYU-EEDRM4E-XVZUKFO-63LAIOO-WHFFS2V-3SH3KR2-VYEFQLW-4QOFBQU")
 // (device "bumba" "JS7PWTO-VKFGBUP-GNFLSWP-MGFJ2KH-HLO2LKW-V3RPCR6-PCB5SQC-42FCKQZ");
 settings.folders = {
@@ -61,7 +60,7 @@
 path = lib.mkDefault "/tmp/books";
 devices = [
 "chungus"
-# "cream"
+"cream"
 "cherry"
 ];
 versioning = {
@@ -74,7 +73,7 @@
 path = lib.mkDefault "/tmp/desktop";
 devices = [
 "chungus"
-# "cream"
+"cream"
 "cherry"
 };
@@ -83,7 +82,7 @@
 path = lib.mkDefault "/tmp/finance";
 devices = [
 "chungus"
-# "cream"
+"cream"
 "cherry"
 ];
 versioning = {
@@ -103,9 +102,9 @@
 enable = lib.mkDefault false;
 path = lib.mkDefault "/tmp/logseq";
 devices = [
-"cherry"
 "chungus"
-"iPad"
+"cream"
+"cherry"
 "iPhone"
 ];
 };
@@ -122,7 +121,7 @@
 path = lib.mkDefault "/tmp/oscar_cpap";
 devices = [
 "chungus"
-# "cream"
+"cream"
 "cherry"
 ];
 };
@@ -131,7 +130,7 @@
 path = lib.mkDefault "/tmp/password-store";
 devices = [
 "chungus"
-# "cream"
+"cream"
 "cherry"
 ];
 versioning = {
@@ -144,7 +143,7 @@
 enable = lib.mkDefault false;
 path = lib.mkDefault "/tmp/password-store";
 devices = [
-# "cream"
+"cream"
 "cherry"
 "orbi"
 ];

View file

@@ -14,7 +14,7 @@ let
 bobi = "10.23.42.25";
 cherry = "10.23.42.29";
 chungus = "10.23.42.28";
-# cream = "10.23.42.27";
+cream = "10.23.42.27";
 mobi = "10.23.42.23";
 orbi = "10.23.42.100";
 };
@@ -28,7 +28,6 @@ let
 "photoprism.orbi" = hosts.orbi;
 # chungus
 "video.chungus" = hosts.chungus;
-"music.chungus" = hosts.chungus;
 "de.tts.chungus" = hosts.chungus;
 "en.tts.chungus" = hosts.chungus;
 "flix.chungus" = hosts.chungus;
@@ -68,10 +67,10 @@ in
 subnets = [ { address = hosts.bobi; } ];
 settings.Ed25519PublicKey = "jwvNd4oAgz2cWEI74VTVYU1qgPWq823/a0iEDqJ8KMD";
 };
-# cream = {
-# subnets = [ { address = hosts.cream; } ];
-# settings.Ed25519PublicKey = Ed25519PublicKey "cream";
-# };
+cream = {
+subnets = [ { address = hosts.cream; } ];
+settings.Ed25519PublicKey = Ed25519PublicKey "cream";
+};
 cherry = {
 subnets = [ { address = hosts.cherry; } ];
 settings.Ed25519PublicKey = Ed25519PublicKey "cherry";

View file

@@ -12,7 +12,7 @@ let
 port = 721;
 hosts = {
 cherry = "10.123.42.29";
-# cream = "10.123.42.27";
+cream = "10.123.42.27";
 robi = "10.123.42.123";
 sternchen = "10.123.42.25";
 sterni = "10.123.42.24";
@@ -35,10 +35,10 @@ in
 subnets = [ { address = hosts.sternchen; } ];
 settings.Ed25519PublicKey = "Z567IKl00Kw5JFBNwMvjL33QYe2hRoNtQcNIDFRPReB";
 };
-# cream = {
-# subnets = [ { address = hosts.cream; } ];
-# settings.Ed25519PublicKey = "Y/YRA90mAlNEmdhUWlUTHjjsco6d6hlvW11sPtarIdL";
-# };
+cream = {
+subnets = [ { address = hosts.cream; } ];
+settings.Ed25519PublicKey = "Y/YRA90mAlNEmdhUWlUTHjjsco6d6hlvW11sPtarIdL";
+};
 cherry = {
 subnets = [ { address = hosts.cherry; } ];
 settings.Ed25519PublicKey = "BsPIrZjbzn0aryC0HO3OXSb4oFCMmzNDmMDQmxUXUuC";
@@ -80,13 +80,13 @@ in
 );
 services.openssh.knownHosts = {
-# "cream.${network}" = {
-# hostNames = [
-# "cream.${network}"
-# hosts.cream
-# ];
-# publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIConHiCL7INgAhuN6Z9TqP0zP+xNpdV7+OHwUca4IRDD";
-# };
+"cream.${network}" = {
+hostNames = [
+"cream.${network}"
+hosts.cream
+];
+publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIConHiCL7INgAhuN6Z9TqP0zP+xNpdV7+OHwUca4IRDD";
+};
 "sternchen.${network}" = {
 hostNames = [
 "sterni.${network}"
View file

@@ -25,8 +25,6 @@ with lib;
 config = {
 networking.extraHosts = ''
 10.100.0.1 cache.orbi.wg0
-10.100.0.1 orbi.wg0
-10.100.0.2 chungus.wg0
 '';
 };

View file

@@ -17,6 +17,7 @@ with lib;
 ./direnv.nix
 ./git.nix
 ./heygpt.nix
+./hoard.nix
 ./remote-install.nix
 ./wtf.nix
 ./zsh.nix

View file

@ -0,0 +1,83 @@
{
pkgs,
config,
lib,
...
}:
with lib;
let
hoardSrc = pkgs.fetchFromGitHub {
owner = "Hyde46";
repo = "hoard";
rev = "v1.3.1";
sha256 = "sha256-Gm3X6/g5JQJEl7wRvWcO4j5XpROhtfRJ72LNaUeZRGc=";
};
in
{
options.components.terminal.hoard.enable = mkOption {
type = lib.types.bool;
default = config.components.terminal.enable;
};
config = mkIf (config.components.terminal.hoard.enable) {
# todo : sync via syncthing
#backup.dirs = [
# "/root/.config/hoard"
# "/home/palo/.config/hoard"
#];
environment.systemPackages = [ pkgs.legacy_2211.hoard ];
home-manager.users.mainUser = {
xdg.configFile."hoard/config.yml".text = builtins.toJSON {
version = "1.0.1";
default_namespace = "default";
config_home_path = "/home/palo/.config/hoard";
trove_path = "/home/palo/.config/hoard/trove.yml";
query_prefix = " >";
primary_color = [
87
142
87
];
secondary_color = [
203
184
144
];
tertiary_color = [
30
30
30
];
command_color = [
30
30
30
];
parameter_token = "#";
read_from_current_directory = true;
};
programs.zsh.initExtra = ''
export HOARD_NOBIND=1
source ${hoardSrc}/src/shell/hoard.zsh
bindkey '^x' _hoard_list_widget
'';
};
# use showkey -a
# Ctrl-h is equivalent to Ctrl-Backspace (for some reason)
programs.zsh.interactiveShellInit = ''
export HOARD_NOBIND=1
source ${hoardSrc}/src/shell/hoard.zsh
bindkey '^x' _hoard_list_widget
'';
programs.bash.interactiveShellInit = ''
export HOARD_NOBIND=1
source ${hoardSrc}/src/shell/hoard.bash
bind -x '"\C-x": __hoard_list'
'';
};
}

View file

@@ -23,7 +23,7 @@ let
 ${pkgs.iw}/bin/iw dev \
 | ${pkgs.gnused}/bin/sed -n 's/^\s*Interface\s\+\([0-9a-z]\+\)$/\1/p'
 ); do
-inet=$(${pkgs.iproute2}/bin/ip addr show $dev \
+inet=$(${pkgs.iproute}/bin/ip addr show $dev \
 | ${pkgs.gnused}/bin/sed -n 's/.*inet \([0-9]\+\.[0-9]\+\.[0-9]\+\.[0-9]\+\).*/\1/p') \
 || unset inet
 ssid=$(${pkgs.iw}/bin/iw dev $dev link \
View file

@@ -16,6 +16,7 @@ with lib;
 environment.systemPackages = [ pkgs.fail2ban ];
 services.fail2ban = {
 enable = true;
+#package = pkgs.legacy_2311.fail2ban;
 jails = { };
 };
 })

File diff suppressed because it is too large

flake.nix
View file

@@ -5,48 +5,40 @@
 clan-core.inputs.flake-parts.follows = "flake-parts";
 clan-core.inputs.nixpkgs.follows = "nixpkgs";
-clan-core.url = "git+https://git.clan.lol/clan/clan-core?rev=1bd3af310ea074d0ea9de6233376476c6ca9149a"; # last time clan was using facts instead of vars
+clan-core.url = "git+https://git.clan.lol/clan/clan-core";
 clan-fact-generators.inputs.clan-core.follows = "clan-core";
 clan-fact-generators.url = "github:mrvandalo/clan-fact-generators";
 flake-parts.inputs.nixpkgs-lib.follows = "nixpkgs";
 flake-parts.url = "github:hercules-ci/flake-parts";
-healthchecks.inputs.nixpkgs.follows = "nixpkgs";
-healthchecks.url = "github:mrvandalo/nixos-healthchecks";
-#healthchecks.url = "git+file:///home/palo/dev/nixos/healthcheck";
 home-manager-utils.inputs.home-manager.follows = "home-manager";
 home-manager-utils.url = "github:mrvandalo/home-manager-utils";
 home-manager.inputs.nixpkgs.follows = "nixpkgs";
 home-manager.url = "github:nix-community/home-manager";
-kmonad.inputs.nixpkgs.follows = "nixpkgs";
-kmonad.url = "github:kmonad/kmonad?dir=nix";
+# fixme: kmonad crashes every now and then and the keyboard is not usable anymore.
+# todo: maybe use https://github.com/jtroo/kanata instead
 landingpage.url = "github:mrVanDalo/landingpage";
+nixos-anywhere.url = "github:nix-community/nixos-anywhere";
 nix-topology.inputs.nixpkgs.follows = "nixpkgs";
 nix-topology.url = "github:oddlama/nix-topology";
-nixos-anywhere.url = "github:nix-community/nixos-anywhere";
 nixos-hardware.url = "github:nixos/nixos-hardware";
 nixpkgs-legacy_2211.url = "github:nixos/nixpkgs/nixos-22.11";
 nixpkgs-legacy_2311.url = "github:nixos/nixpkgs/nixos-23.11";
 nixpkgs-legacy_2405.url = "github:nixos/nixpkgs/nixos-24.05";
-nixpkgs-legacy_2411.url = "github:nixos/nixpkgs/nixos-24.11";
 nixpkgs-unstable-small.url = "github:nixos/nixpkgs/nixos-unstable-small";
 nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
 permown.inputs.nixpkgs.follows = "nixpkgs";
 permown.url = "github:mrVanDalo/module.permown";
 polygon-art.url = "git+https://git.ingolf-wagner.de/palo/polygon-art.git";
 private-parts.inputs.nixpkgs.follows = "nixpkgs"; # only private input
-private-parts.url = "git+ssh://forgejo@git.ingolf-wagner.de:2222/palo/nixos-private-parts.git?ref=main";
+private-parts.url = "git+ssh://forgejo@git.ingolf-wagner.de/palo/nixos-private-parts.git?ref=main";
 #private-parts.url = "git+file:///home/palo/dev/nixos/nixos-private-parts";
 retiolum.url = "github:Mic92/retiolum";
-share-http.inputs.nixpkgs.follows = "nixpkgs"; # only private input
-share-http.url = "git+ssh://forgejo@git.ingolf-wagner.de:2222/palo/share-host.git?ref=main";
 srvos.url = "github:nix-community/srvos";
 stylix.inputs.home-manager.follows = "home-manager";
 stylix.inputs.nixpkgs.follows = "nixpkgs";
 stylix.url = "github:danth/stylix";
-taskwarrior.inputs.nixpkgs.follows = "nixpkgs";
-taskwarrior.url = "github:mrvandalo/taskwarrior-flake";
-#taskwarrior.url = "git+file:///home/palo/dev/nixos/taskwarrior-flake";
-telemetry.inputs.nixpkgs.follows = "nixpkgs";
-telemetry.url = "github:mrvandalo/nixos-telemetry";
-#telemetry.url = "git+file:///home/palo/dev/nixos/nixos-telemetry";
+taskshell.inputs.nixpkgs.follows = "nixpkgs";
+taskshell.url = "github:mrvandalo/taskshell";
 treefmt-nix.inputs.nixpkgs.follows = "nixpkgs";
 treefmt-nix.url = "github:numtide/treefmt-nix";
@@ -60,33 +52,30 @@
 outputs =
 inputs@{
-self,
 clan-core,
 clan-fact-generators,
 flake-parts,
-healthchecks,
 home-manager,
 home-manager-utils,
-kmonad,
 landingpage,
+nix-topology,
 nixos-anywhere,
 nixos-hardware,
 nixpkgs,
 nixpkgs-legacy_2211,
 nixpkgs-legacy_2311,
 nixpkgs-legacy_2405,
-nixpkgs-legacy_2411,
 nixpkgs-unstable-small,
 permown,
 polygon-art,
 private-parts,
 retiolum,
-share-http,
+self,
 srvos,
 stylix,
-taskwarrior,
-telemetry,
+taskshell,
 treefmt-nix,
-nix-topology,
 }:
 let
@@ -139,15 +128,13 @@
 };
 polygon-art = polygon-art.packages.${system};
 landingpage = landingpage.packages.${system}.plain;
-share-via-http = share-http.packages.${system}.default;
-inherit (taskwarrior.packages.${system})
-bugwarrior
-tasksh
-taskwarrior-hooks
-;
+kmonad = kmonad.packages.${system}.kmonad;
+tasksh = taskshell.packages.${system}.tasksh;
 inherit (self.packages.${system})
 otpmenu
+taskwarrior-hooks
 nsxiv
+bugwarrior
 ;
 })
 ];
@@ -179,7 +166,6 @@
 nixpkgs.pkgs = meta.pkgs;
 nixpkgs.hostPlatform = meta.system;
 clan.core.facts.secretStore = "password-store";
-clan.core.vars.settings.secretStore = "password-store";
 imports =
 modules
@@ -227,26 +213,25 @@
 ];
 };
-defaultAuthorizedKeys =
-{ config, pkgs, ... }:
-{
-users.users.root.openssh.authorizedKeys.keyFiles = [
-# yubikey key
-./assets/mrvandalo_rsa.pub
-# backup key
-"${config.clan.core.clanDir}/machines/chungus/facts/ssh.syncoid.id_ed25519.pub"
-"${config.clan.core.clanDir}/machines/chungus/facts/ssh.rbackup.id_ed25519.pub"
-"${config.clan.core.clanDir}/machines/chungus/facts/ssh.paperless-ngx.id_ed25519.pub"
-];
-environment.systemPackages = [ pkgs.borgbackup ];
-};
 defaultModules = [
 # make flake inputs accessible in NixOS
 {
 _module.args.self = self;
 _module.args.inputs = self.inputs;
 }
+# ssh keys
+(
+{ config, ... }:
+{
+users.users.root.openssh.authorizedKeys.keyFiles = [
+# master key
+./assets/mrvandalo_rsa.pub
+# backup key
+"${config.clan.core.clanDir}/machines/chungus/facts/ssh.syncoid.id_ed25519.pub"
+"${config.clan.core.clanDir}/machines/chungus/facts/ssh.rbackup.id_ed25519.pub"
+];
+}
+)
 {
 # disable emergency mode everywhere, although it might be needed on laptops
 boot.initrd.systemd.emergencyAccess = false;
@@ -284,13 +269,13 @@
 ./components
 ./features
 #./modules
-clan-core.nixosModules.clanCore
-telemetry.nixosModules.telemetry
+inputs.clan-core.nixosModules.clanCore
 {
 clan.core.clanDir = ./.; # fixes issues with clanCore https://git.clan.lol/clan/clan-core/issues/1979
 }
 # inputs.stylix.nixosModules.stylix # fixme: not working
 permown.nixosModules.permown
+kmonad.nixosModules.default
 home-manager.nixosModules.home-manager
 # retiolum.nixosModules.retiolum # fixme: not working
 ];
@@ -305,34 +290,10 @@
 ./features
 ./modules # todo : spread this across features and components
 #./system/all # todo : spread this across features and components
-(
-{ lib, pkgs, ... }:
-{
-telemetry.netdata.enable = false;
-# "fixes" https://github.com/NixOS/nixpkgs/issues/356708
-#services.opentelemetry-collector.package = lib.mkForce pkgs.legacy_2405.opentelemetry-collector-contrib;
-services.opentelemetry-collector.package = lib.mkForce (
-pkgs.opentelemetry-collector-contrib.overrideAttrs (old: rec {
-version = "0.110.0";
-src = pkgs.fetchFromGitHub {
-owner = "open-telemetry";
-repo = "opentelemetry-collector-contrib";
-rev = "v${version}";
-hash = "sha256-bDtP7EFKus0NJpLccbD+HlzEusc+KAbKWmS/KGthtwY=";
-};
-vendorHash = "sha256-pDDEqtXu167b+J1+k7rC1BE5/ehxzG0ZAkhxqmJpHsg=";
-})
-);
-}
-)
 # some modules I always use
-telemetry.nixosModules.telemetry
 permown.nixosModules.permown
+kmonad.nixosModules.default
 # some default things I always want
 (
 { pkgs, ... }:
@@ -376,11 +337,10 @@
 package = pkgs.nerdfonts.override { fonts = [ "JetBrainsMono" ]; };
 name = "JetBrains Mono";
 };
-emoji = config.stylix.fonts.monospace;
-# emoji = {
-# package = pkgs.noto-fonts-emoji;
-# name = "Noto Color Emoji";
-# };
+emoji = {
+package = pkgs.noto-fonts-emoji;
+name = "Noto Color Emoji";
+};
 sizes.popups = 15;
 };
 };
@@ -399,7 +359,6 @@
 home-manager.backupFileExtension = "backup";
 home-manager.sharedModules = [
 home-manager-utils.hmModule
-taskwarrior.hmModules.bugwarrior
 ];
 };
@@ -416,9 +375,9 @@
 systems = [ "x86_64-linux" ];
 imports = [
 clan-core.flakeModules.default
-healthchecks.flakeModule
 ./nix/formatter.nix
 ./nix/packages
+./nix/verify
 ./nix/topology
 ];
@@ -430,11 +389,32 @@
 machines = {
+cream = clanSetup {
+name = "cream";
+host = "cream.bear";
+modules = [
+zerotierModules
+nixos-hardware.nixosModules.framework-12th-gen-intel
+retiolum.nixosModules.retiolum
+private-parts.nixosModules.cream
+homeManagerModules
+stylixModules
+{ home-manager.users.mainUser.gui.enable = true; }
+{
+home-manager.users.mainUser = import ./homes/palo;
+home-manager.users.root = import ./homes/root;
+}
+{
+clan.core.machineDescription = "Laptop";
+}
+];
+};
 cherry = clanSetup {
 name = "cherry";
 host = "cherry.bear";
 modules = [
-healthchecks.nixosModules.default
+self.nixosModules.verify
 zerotierModules
 nixos-hardware.nixosModules.framework-13th-gen-intel
 retiolum.nixosModules.retiolum
@@ -449,15 +429,6 @@
 {
 clan.core.machineDescription = "Laptop";
 }
-(
-{ config, ... }:
-{
-# keys only to access cherry
-users.users.root.openssh.authorizedKeys.keyFiles = [
-"${config.clan.core.clanDir}/machines/cherry/facts/ssh.root.cherry.id_ed25519.pub"
-];
-}
-)
 ];
 };
@@ -465,7 +436,7 @@
 name = "chungus";
 host = "chungus.bear";
 modules = [
-healthchecks.nixosModules.default
+self.nixosModules.verify
 zerotierModules
 zerotierControllerModule
 homeManagerModules
@@ -479,15 +450,6 @@
 {
 clan.core.machineDescription = "Home Server";
 }
-(
-{ config, ... }:
-{
-# keys only to access chungus
-users.users.root.openssh.authorizedKeys.keyFiles = [
-"${config.clan.core.clanDir}/machines/cherry/facts/ssh.root.chungus.id_ed25519.pub"
-];
-}
-)
 ];
 };
@@ -496,8 +458,7 @@
 host = "orbi.bear";
 #host = "95.216.66.212";
 modules = [
-defaultAuthorizedKeys
-healthchecks.nixosModules.default
+self.nixosModules.verify
 homeManagerModules
 stylixModules
 zerotierModules
@@ -519,7 +480,6 @@
 #host = "167.235.205.150";
 host = "95.217.18.54";
 modules = [
-defaultAuthorizedKeys
 homeManagerModules
 stylixModules
 srvos.nixosModules.hardware-hetzner-cloud
@@ -541,7 +501,6 @@
 #host = "usbstick.bear";
 host = "10.100.0.100";
 modules = [
-defaultAuthorizedKeys
 homeManagerModules
 stylixModules
 zerotierModules

View file

@@ -3,8 +3,7 @@
 imports = [
 ./editor.nix
 ./network.nix
-#./oh-my-posh
-./starship-rs
+./oh-my-posh
 ./packages.nix
 ./terminal.nix
 ./zfs.nix

View file

@@ -23,9 +23,7 @@ with lib;
 gimoji
-#tldr
-tealdeer
-navi # cheatsheet manager
+tldr
 bandwhich # todo : put this to common/networking.nix
@@ -39,12 +37,8 @@ with lib;
 (writers.writeBashBin "vulnix-system" ''
 ${vulnix}/bin/vulnix --profile /nix/var/nix/profiles/system
 '')
-# cpu load monitor
-glances
 ];
+# cpu load monitor
 programs.btop.enable = true;
 }

View file

@ -1,33 +0,0 @@
{
pkgs,
config,
lib,
...
}:
with lib;
with config.lib.stylix.colors.withHashtag;
{
programs.starship = {
enable = true;
# download presets from : https://starship.rs/presets/
settings = builtins.fromTOML ((builtins.readFile ./gruvbox-rainbow.toml)) // {
palettes.stylix = {
color_fg0 = base01;
color_terminal_fg = base05;
color_terminal_bg = base00;
color_bg1 = base04;
color_bg2 = base02;
color_bg3 = base03;
color_blue = base0D;
color_aqua = base0C;
color_green = base0B;
color_orange = base0F;
color_purple = base0E;
color_red = base08;
color_yellow = base0A;
};
};
};
}

View file

@ -1,184 +0,0 @@
"$schema" = 'https://starship.rs/config-schema.json'
format = """
$os\
$username\
$hostname \
[](bg:color_yellow fg:color_terminal_bg)\
$directory\
[](fg:color_yellow bg:color_aqua)\
$git_branch\
$git_status\
[](fg:color_aqua bg:color_blue)\
$c\
$rust\
$golang\
$nodejs\
$php\
$java\
$kotlin\
$haskell\
$python\
[](fg:color_blue bg:color_bg3)\
$docker_context\
$conda\
[](fg:color_bg3 bg:color_bg1)\
$time\
[ ](fg:color_bg1)\
$character"""
palette = 'stylix' # we use stylix instead of gruvbox_dark
# todo : use stylix/base16 scheme
[palettes.gruvbox_dark]
color_fg0 = '#fbf1c7'
color_terminal_bg = '#fbf1c7' # original background
color_terminal_fg = '#3c3836' # original foreground
color_bg1 = '#3c3836'
color_bg2 = '#665c54'
color_bg3 = '#665c54'
color_blue = '#458588'
color_aqua = '#689d6a'
color_green = '#98971a'
color_orange = '#d65d0e'
color_purple = '#b16286'
color_red = '#cc241d'
color_yellow = '#d79921'
[os]
disabled = false
style = "bold bg:color_blue fg:color_terminal_bg"
#format = "[$symbol ]($style)"
format = "[](color_blue)[$symbol ]($style)[ ](fg:color_blue bg:color_terminal_bg)"
[os.symbols]
Alpine = ""
Amazon = ""
Android = ""
Arch = "󰣇"
Artix = "󰣇"
CentOS = ""
Debian = "󰣚"
EndeavourOS = ""
Fedora = "󰣛"
Gentoo = "󰣨"
Linux = "󰌽"
Macos = "󰀵"
Manjaro = ""
Mint = "󰣭"
NixOS = ""
Pop = ""
Raspbian = "󰐿"
RedHatEnterprise = "󱄛"
Redhat = "󱄛"
SUSE = ""
Ubuntu = "󰕈"
Windows = "󰍲"
[username]
show_always = true
style_user = "bg:color_terminal_bg fg:color_terminal_fg"
style_root = "bg:color_terminal_bg fg:color_red bold"
format = '[$user]($style)'
[hostname]
ssh_only = true
style = "bg:color_terminal_bg fg:color_terminal_fg"
ssh_symbol = "@"
format = "[$ssh_symbol$hostname]($style)"
[directory]
style = "fg:color_fg0 bg:color_yellow"
format = "[ $path ]($style)"
truncation_length = 3
truncation_symbol = "…/"
[directory.substitutions]
"Documents" = "󰈙 "
"Downloads" = " "
"Music" = "󰝚 "
"Pictures" = " "
"Developer" = "󰲋 "
"dev" = "󰲋 "
[git_branch]
symbol = ""
style = "bg:color_aqua"
format = '[[ $symbol $branch ](fg:color_fg0 bg:color_aqua)]($style)'
[git_status]
style = "bg:color_aqua"
format = '[[($all_status$ahead_behind )](fg:color_fg0 bg:color_aqua)]($style)'
[nodejs]
symbol = ""
style = "bg:color_blue"
format = '[[ $symbol( $version) ](fg:color_fg0 bg:color_blue)]($style)'
[c]
symbol = " "
style = "bg:color_blue"
format = '[[ $symbol( $version) ](fg:color_fg0 bg:color_blue)]($style)'
[rust]
symbol = ""
style = "bg:color_blue"
format = '[[ $symbol( $version) ](fg:color_fg0 bg:color_blue)]($style)'
[golang]
symbol = ""
style = "bg:color_blue"
format = '[[ $symbol( $version) ](fg:color_fg0 bg:color_blue)]($style)'
[php]
symbol = ""
style = "bg:color_blue"
format = '[[ $symbol( $version) ](fg:color_fg0 bg:color_blue)]($style)'
[java]
symbol = ""
style = "bg:color_blue"
format = '[[ $symbol( $version) ](fg:color_fg0 bg:color_blue)]($style)'
[kotlin]
symbol = ""
style = "bg:color_blue"
format = '[[ $symbol( $version) ](fg:color_fg0 bg:color_blue)]($style)'
[haskell]
symbol = ""
style = "bg:color_blue"
format = '[[ $symbol( $version) ](fg:color_fg0 bg:color_blue)]($style)'
[python]
symbol = ""
style = "bg:color_blue"
format = '[[ $symbol( $version) ](fg:color_fg0 bg:color_blue)]($style)'
[docker_context]
symbol = ""
style = "bg:color_bg3"
format = '[[ $symbol( $context) ](fg:color_fg0 bg:color_bg3)]($style)'
[conda]
style = "bg:color_bg3"
format = '[[ $symbol( $environment) ](fg:color_fg0 bg:color_bg3)]($style)'
[time]
disabled = false
time_format = "%R"
style = "bg:color_bg1"
format = '[[  $time ](fg:color_fg0 bg:color_bg1)]($style)'
[line_break]
disabled = false
[character]
disabled = false
success_symbol = "[](fg:color_bg2)[ ](bold fg:color_terminal_fg bg:color_bg2)[](fg:color_bg2)"
error_symbol = "[](fg:color_bg2)[ ](bold fg:color_red bg:color_bg2)[](fg:color_bg2)"
vimcmd_symbol = '[](bold fg:color_green)'
vimcmd_replace_one_symbol = '[](bold fg:color_purple)'
vimcmd_replace_symbol = '[](bold fg:color_purple)'
vimcmd_visual_symbol = '[](bold fg:color_yellow)'

View file

@@ -20,8 +20,6 @@ with lib;
 zed-editor
-minicom # for flipper zero
 #jetbrains.mps
 #jetbrains.datagrip

View file

@@ -11,17 +11,13 @@ with lib;
 home.packages = [
-#pureref
+pureref
 gimp
 inkscape
 imagemagick
 blender
 lightburn
-colorpicker
-# to convert HEIC -> JPG
-# heif-dec -q 92 <name>.HEIC
-libheif
-darktable
 # CAD & 3D Plotting
 openscad

View file

@@ -11,9 +11,6 @@ with lib;
 (mkIf config.gui.enable {
 home.packages = [
-pkgs.share-via-http
 pkgs.freetube
 pkgs.vlc

View file

@@ -55,14 +55,6 @@ with lib;
 #seamly2d
 #valentina
-# xorg/x11 macros
-# ---------------
-# wait 2 secs, than record mouse movements (use Ctrl-C to stop recording)
-# > cnee --record --mouse -o ./mouse-events.xnl --time 2
-# replay 3 times the mouse movements (zsh only)
-# > repeat 3 cnee --time 2 --replay -f ./mouse-events.xnl
-xnee
 ];
 };
}; };

View file

@@ -13,8 +13,7 @@ with lib;
 home.packages = [
 emoji-picker
 signal-desktop
-#legacy_2311.fluffychat
-#fluffychat
+legacy_2311.fluffychat
 ];
 };
}; };

View file

@@ -7,18 +7,96 @@
 with lib;
 with types;
 let
+mkMagicMergeOption =
+{
+description ? "",
+example ? { },
+default ? { },
+apply ? id,
+...
+}:
+mkOption {
+inherit
+example
+description
+default
+apply
+;
+type =
+with lib.types;
+let
+valueType =
+nullOr (oneOf [
+bool
+int
+float
+str
+(attrsOf valueType)
+(listOf valueType)
+])
+// {
+description = "bool, int, float or str";
+emptyValue.value = { };
+};
+in
+valueType;
+};
 #taskwarrior-tui = pkgs.legacy_2311.taskwarrior-tui;
 taskwarrior-tui = pkgs.taskwarrior-tui;
+taskwarrior = pkgs.taskwarrior3;
 in
 {
-config = mkIf config.gui.enable {
-bugwarrior.enable = true;
+# bugwarrior (a bit fiddly)
+imports = [
+{
+options.bugwarrior.config = mkMagicMergeOption {
+type = attrs;
+default = { };
+};
+config = mkIf config.gui.enable {
+home.file.".config/bugwarrior/bugwarrior.toml".source =
+(pkgs.formats.toml { }).generate "bugwarriorrc.toml"
+(
+{
+general.taskrc = pkgs.writeText "taskrc" "data.location=$HOME/.bugwarrior";
+}
+// config.bugwarrior.config
+);
+home.packages = [
+pkgs.bugwarrior
+#export TASKRC=$HOME/.bugwarrior/${pkgs.writeText "bugwarrior.taskrc" "data.location=$HOME/.bugwarrior"}
+(pkgs.writers.writeBashBin "bugwarrior-sync" ''
+set -eo pipefail
+mkdir -p $HOME/.bugwarrior
+touch $HOME/.bugwarrior/taskrc
+export TASKRC=$HOME/.bugwarrior/taskrc
+export TASKDATA=$HOME/.bugwarrior
+echo "bugwarrior pull" | ${pkgs.boxes}/bin/boxes -d ansi
+${pkgs.bugwarrior}/bin/bugwarrior pull "$@"
+echo "task export" | ${pkgs.boxes}/bin/boxes -d ansi
+${pkgs.taskwarrior}/bin/task export > $HOME/.bugwarrior/bugwarrior.json
+unset TASKRC
+unset TASKDATA
+echo "task import" | ${pkgs.boxes}/bin/boxes -d ansi
+${taskwarrior}/bin/task import rc.hooks=0 $HOME/.bugwarrior/bugwarrior.json
+'')
+];
+};
+}
+];
+config = mkIf config.gui.enable {
 home.packages = [
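The point of `mkMagicMergeOption` above is that its recursive value type accepts arbitrarily nested attribute sets of primitives, so `bugwarrior.config` can be assembled from several modules and serialized straight to TOML. A hypothetical value (section and target names invented for the sketch):

{
  bugwarrior.config = {
    general.targets = [ "my_github" ];
    my_github.service = "github";
  };
}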

View file

@ -0,0 +1,148 @@
{
# cat ~/.ssh/id_rsa.pub
publicSshKey ? "",
# remote-install-get-hiddenReceiver
hiddenReceiver ? "",
}:
{
config,
lib,
pkgs,
...
}:
{
imports = [
{
# system setup
networking.hostName = "liveos";
users.extraUsers = {
root = {
openssh.authorizedKeys.keys = [ publicSshKey ];
};
};
}
{
# installed packages
environment.systemPackages = with pkgs; [
#style
most
rxvt_unicode.terminfo
#monitoring tools
htop
iotop
#network
iptables
iftop
nmap
#stuff for dl
aria2
#neat utils
pciutils
psmisc
tmux
usbutils
git
#unpack stuff
p7zip
unzip
unrar
#data recovery
ddrescue
ntfs3g
dosfstools
];
}
{
# bash configuration
programs.bash = {
enableCompletion = true;
interactiveShellInit = ''
HISTCONTROL='erasedups:ignorespace'
HISTSIZE=65536
HISTFILESIZE=$HISTSIZE
shopt -s checkhash
shopt -s histappend histreedit histverify
shopt -s no_empty_cmd_completion
complete -d cd
'';
promptInit = ''
if test $UID = 0; then
PS1='\[\033[1;31m\]\w\[\033[0m\] '
PROMPT_COMMAND='echo -ne "\033]0;$$ $USER@$PWD\007"'
elif test $UID = 1337; then
PS1='\[\033[1;32m\]\w\[\033[0m\] '
PROMPT_COMMAND='echo -ne "\033]0;$$ $PWD\007"'
else
PS1='\[\033[1;33m\]\u@\w\[\033[0m\] '
PROMPT_COMMAND='echo -ne "\033]0;$$ $USER@$PWD\007"'
fi
if test -n "$SSH_CLIENT"; then
PS1='\[\033[35m\]\h'" $PS1"
PROMPT_COMMAND='echo -ne "\033]0;$$ $HOSTNAME $USER@$PWD\007"'
fi
'';
};
}
{
# ssh configuration
services.openssh.enable = true;
services.openssh.passwordAuthentication = false;
systemd.services.sshd.wantedBy = lib.mkForce [ "multi-user.target" ];
}
{
# hidden ssh announce
config =
let
torDirectory = "/var/lib/tor";
hiddenServiceDir = torDirectory + "/onion/hidden-ssh";
in
{
services.tor = {
enable = true;
client.enable = true;
relay.onionServices.hidden-ssh = {
version = 3;
map = [
{
port = 22;
target.port = 22;
}
];
};
};
systemd.services.hidden-ssh-announce = {
description = "irc announce hidden ssh";
after = [
"tor.service"
"network-online.target"
];
wants = [ "tor.service" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = {
ExecStart = pkgs.writers.writeDash "irc-announce-ssh" ''
set -efu
until test -e ${hiddenServiceDir}/hostname; do
echo "still waiting for ${hiddenServiceDir}/hostname"
sleep 1
done
until ${pkgs.tor}/bin/torify ${pkgs.netcat-openbsd}/bin/nc -z ${hiddenReceiver} 1337; do sleep 1; done && \
echo "torify ssh root@$(cat ${hiddenServiceDir}/hostname) -i ~/.ssh/id_rsa" | ${pkgs.tor}/bin/torify ${pkgs.nmap}/bin/ncat ${hiddenReceiver} 1337
'';
PrivateTmp = "true";
User = "tor";
Type = "oneshot";
};
};
};
}
];
}

View file

@ -0,0 +1,61 @@
{
config,
lib,
pkgs,
...
}:
{
imports = [
# Include the results of the hardware scan.
./hardware-configuration.nix
"${builtins.fetchTarball "https://github.com/nix-community/disko/archive/master.tar.gz"}/module.nix"
(import ./disko-config.nix { })
];
networking.hostName = "nixos";
boot.supportedFilesystems = [ "zfs" ];
# head -c4 /dev/urandom | od -A none -t x4
networking.hostId = "4750e4b8";
boot.loader.systemd-boot.enable = true;
boot.loader.efi.canTouchEfiVariables = true;
boot.tmpOnTmpfs = true; # make /tmp a tmpfs (performance!)
networking.networkmanager.enable = true;
# Set your time zone.
time.timeZone = "Europe/Berlin";
environment.systemPackages = with pkgs; [
vim
wget
htop
silver-searcher
];
environment.extraInit = ''
# use vi shortcuts
# ----------------
set -o vi
EDITOR=vim
'';
services.openssh.enable = true;
users.users.root.openssh.authorizedKeys.keys = [
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC6uza62+Go9sBFs3XZE2OkugBv9PJ7Yv8ebCskE5WYPcahMZIKkQw+zkGI8EGzOPJhQEv2xk+XBf2VOzj0Fto4nh8X5+Llb1nM+YxQPk1SVlwbNAlhh24L1w2vKtBtMy277MF4EP+caGceYP6gki5+DzlPUSdFSAEFFWgN1WPkiyUii15Xi3QuCMR8F18dbwVUYbT11vwNhdiAXWphrQG+yPguALBGR+21JM6fffOln3BhoDUp2poVc5Qe2EBuUbRUV3/fOU4HwWVKZ7KCFvLZBSVFutXCj5HuNWJ5T3RuuxJSmY5lYuFZx9gD+n+DAEJt30iXWcaJlmUqQB5awcB1S2d9pJ141V4vjiCMKUJHIdspFrI23rFNYD9k2ZXDA8VOnQE33BzmgF9xOVh6qr4G0oEpsNqJoKybVTUeSyl4+ifzdQANouvySgLJV/pcqaxX1srSDIUlcM2vDMWAs3ryCa0aAlmAVZIHgRhh6wa+IXW8gIYt+5biPWUuihJ4zGBEwkyVXXf2xsecMWCAGPWPDL0/fBfY9krNfC5M2sqxey2ShFIq+R/wMdaI7yVjUCF2QIUNiIdFbJL6bDrDyHnEXJJN+rAo23jUoTZZRv7Jq3DB/A5H7a73VCcblZyUmwMSlpg3wos7pdw5Ctta3zQPoxoAKGS1uZ+yTeZbPMmdbw== contact@ingolf-wagner.de"
];
# This value determines the NixOS release from which the default
# settings for stateful data, like file locations and database versions
# on your system were taken. It's perfectly fine and recommended to leave
# this value at the release version of the first install of this system.
# Before changing this value read the documentation for this option
# (e.g. man configuration.nix or on https://nixos.org/nixos/options.html).
system.stateVersion = "22.11"; # Did you read the comment?
}

View file

@ -0,0 +1,19 @@
# remote installation iso
- `./config.nix` : to generate the installation image
- `./remote-service.nix` : tor configuration you have to start on your machine.
## Steps
- import `./remote-service.nix` in your `/etc/nixos/configuration.nix`
- `nixos-rebuild switch`
- run `remote-install-get-hiddenReceiver` and enter the result in `./config.nix`
as `hiddenReceiver`
- set the public key in `./config.nix`
- run `nixos-generate -f install-iso -c ./config.nix`
- prepare the usb stick: `sudo dd if=<path of the iso> of=/dev/<device> bs=4096`
- boot the usb-stick at the new machine
- run `remote-install-start-service`
- after some time you will see a login command you can use to log in to the new machine.
  Now you can do the normal installation procedure.

View file

@ -0,0 +1,35 @@
{ pkgs, lib, ... }:
let
remote-access = import ../lib/remote-access.nix {
# cat ~/.ssh/id_rsa.pub
publicSshKey = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC6uza62+Go9sBFs3XZE2OkugBv9PJ7Yv8ebCskE5WYPcahMZIKkQw+zkGI8EGzOPJhQEv2xk+XBf2VOzj0Fto4nh8X5+Llb1nM+YxQPk1SVlwbNAlhh24L1w2vKtBtMy277MF4EP+caGceYP6gki5+DzlPUSdFSAEFFWgN1WPkiyUii15Xi3QuCMR8F18dbwVUYbT11vwNhdiAXWphrQG+yPguALBGR+21JM6fffOln3BhoDUp2poVc5Qe2EBuUbRUV3/fOU4HwWVKZ7KCFvLZBSVFutXCj5HuNWJ5T3RuuxJSmY5lYuFZx9gD+n+DAEJt30iXWcaJlmUqQB5awcB1S2d9pJ141V4vjiCMKUJHIdspFrI23rFNYD9k2ZXDA8VOnQE33BzmgF9xOVh6qr4G0oEpsNqJoKybVTUeSyl4+ifzdQANouvySgLJV/pcqaxX1srSDIUlcM2vDMWAs3ryCa0aAlmAVZIHgRhh6wa+IXW8gIYt+5biPWUuihJ4zGBEwkyVXXf2xsecMWCAGPWPDL0/fBfY9krNfC5M2sqxey2ShFIq+R/wMdaI7yVjUCF2QIUNiIdFbJL6bDrDyHnEXJJN+rAo23jUoTZZRv7Jq3DB/A5H7a73VCcblZyUmwMSlpg3wos7pdw5Ctta3zQPoxoAKGS1uZ+yTeZbPMmdbw==";
# remote-install-get-hiddenReceiver
hiddenReceiver = "";
};
in
{
imports = [ remote-access ];
# network configuration
# ---------------------
# no wifi
#networking.networkmanager.enable = true;
# wifi
networking.wireless.enable = true;
networking.wireless.networks."ssid".psk = "password";
# configuration
environment.extraInit = ''
# use vi shortcuts
# ----------------
set -o vi
EDITOR=vim
'';
}

View file

@ -0,0 +1,18 @@
# installs scripts and tor to provide an announcement service for remote NixOS installation.
{ pkgs, ... }:
{
services.tor = {
enable = true;
client.enable = true;
relay.onionServices.liveos.map = [ { port = 1337; } ];
};
environment.systemPackages = [
(pkgs.writeShellScriptBin "remote-install-start-service" ''
echo "starting announcment server to receive remote-install iso onion id"
${pkgs.nmap}/bin/ncat -k -l -p 1337
'')
(pkgs.writeShellScriptBin "remote-install-get-hiddenReceiver" ''
sudo cat /var/lib/tor/onion/liveos/hostname
'')
];
}

View file

@ -0,0 +1,34 @@
# Create an easy-to-use image for translation
using [nixos-generators](https://github.com/nix-community/nixos-generators).
## generate easy to use iso
```
nixos-generate -f install-iso -c config.nix
```
## generate vmware image
```
nixos-generate -f vmware -c config.nix
```
## run locally using qcow
```
nixos-generate --run -c config.nix
```
## how to install nixos-generators
```
nix-shell -I nixpkgs=channel:nixos-unstable -p nixos-generators
```
## how to start download service
```
iptables -F # kill firewall
python -m http.server 80 # nix-shell -p python3Full
```
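The `config.nix` passed via `-c` is an ordinary NixOS module; a minimal sketch (the hostname and package are illustrative, the real configuration lives next to this README):
```
{ pkgs, ... }:
{
  networking.hostName = "translate";
  environment.systemPackages = [ pkgs.vlc ];
}
```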

View file

@ -0,0 +1,22 @@
{
pkgs,
lib,
config,
...
}:
{
users.users.mainUser.extraGroups = [
"audio"
"pipewire"
];
hardware.pulseaudio = {
enable = true;
package = pkgs.pulseaudioFull;
};
environment.systemPackages = with pkgs; [
alsaUtils
pavucontrol
];
}

View file

@ -0,0 +1,63 @@
{
pkgs,
lib,
config,
...
}:
{
imports = [
./x11.nix
./audio.nix
];
#networking.networkmanager.enable = true;
#networking.wireless.enable = false;
# configuration
environment.extraInit = ''
# use vi shortcuts
# ----------------
set -o vi
EDITOR=vim
'';
services.xserver.displayManager.sessionCommands = ''
${pkgs.mumble}/bin/mumble mumble://name@lassul.us/party/hard &
${pkgs.vlc}/bin/vlc &
${pkgs.pavucontrol}/bin/pavucontrol &
'';
networking.hostName = "translate";
programs.bash = {
enableCompletion = true;
interactiveShellInit = ''
HISTCONTROL='erasedups:ignorespace'
HISTSIZE=65536
HISTFILESIZE=$HISTSIZE
shopt -s checkhash
shopt -s histappend histreedit histverify
shopt -s no_empty_cmd_completion
complete -d cd
'';
promptInit = ''
if test $UID = 0; then
PS1='\[\033[1;31m\]\w\[\033[0m\] '
PROMPT_COMMAND='echo -ne "\033]0;$$ $USER@$PWD\007"'
elif test $UID = 1337; then
PS1='\[\033[1;32m\]\w\[\033[0m\] '
PROMPT_COMMAND='echo -ne "\033]0;$$ $PWD\007"'
else
PS1='\[\033[1;33m\]\u@\w\[\033[0m\] '
PROMPT_COMMAND='echo -ne "\033]0;$$ $USER@$PWD\007"'
fi
if test -n "$SSH_CLIENT"; then
PS1='\[\033[35m\]\h'" $PS1"
PROMPT_COMMAND='echo -ne "\033]0;$$ $HOSTNAME $USER@$PWD\007"'
fi
'';
};
}

View file

@ -0,0 +1,60 @@
{
pkgs,
config,
lib,
...
}:
{
services.xserver = {
enable = true;
desktopManager = {
xterm.enable = false;
xfce.enable = true;
};
displayManager = {
defaultSession = "xfce";
sddm = {
enable = true;
autoLogin = {
enable = true;
relogin = true;
user = config.users.users.mainUser.name;
};
};
sessionCommands = ''
${pkgs.mumble}/bin/mumble &
${pkgs.vlc}/bin/vlc &
${pkgs.pavucontrol}/bin/pavucontrol &
'';
};
# mouse/touchpad
# --------------
libinput = {
enable = true;
disableWhileTyping = true;
tapping = true;
scrollMethod = "twofinger";
accelSpeed = "2";
};
};
users.users.mainUser = {
isNormalUser = true;
name = "translator";
uid = 1001;
initialPassword = "translate";
};
# Packages
# --------
environment.systemPackages = with pkgs; [
flameshot
pavucontrol
mumble
vlc
];
}

View file

@ -0,0 +1,58 @@
{
config,
lib,
pkgs,
...
}:
{
imports = [
# Include the results of the hardware scan.
./hardware-configuration.nix
];
networking.hostName = "nixos";
# grub configuraton
# -----------------
boot.loader.grub.enable = true;
boot.loader.grub.efiSupport = true;
boot.loader.grub.device = "/dev/sdb";
boot.loader.grub.efiInstallAsRemovable = true;
boot.tmpOnTmpfs = true;
networking.networkmanager.enable = true;
# Set your time zone.
time.timeZone = "Europe/Berlin";
environment.systemPackages = with pkgs; [
vim
wget
htop
silver-searcher
];
environment.extraInit = ''
# use vi shortcuts
# ----------------
set -o vi
EDITOR=vim
'';
services.openssh.enable = true;
users.users.root.openssh.authorizedKeys.keys = [
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC6uza62+Go9sBFs3XZE2OkugBv9PJ7Yv8ebCskE5WYPcahMZIKkQw+zkGI8EGzOPJhQEv2xk+XBf2VOzj0Fto4nh8X5+Llb1nM+YxQPk1SVlwbNAlhh24L1w2vKtBtMy277MF4EP+caGceYP6gki5+DzlPUSdFSAEFFWgN1WPkiyUii15Xi3QuCMR8F18dbwVUYbT11vwNhdiAXWphrQG+yPguALBGR+21JM6fffOln3BhoDUp2poVc5Qe2EBuUbRUV3/fOU4HwWVKZ7KCFvLZBSVFutXCj5HuNWJ5T3RuuxJSmY5lYuFZx9gD+n+DAEJt30iXWcaJlmUqQB5awcB1S2d9pJ141V4vjiCMKUJHIdspFrI23rFNYD9k2ZXDA8VOnQE33BzmgF9xOVh6qr4G0oEpsNqJoKybVTUeSyl4+ifzdQANouvySgLJV/pcqaxX1srSDIUlcM2vDMWAs3ryCa0aAlmAVZIHgRhh6wa+IXW8gIYt+5biPWUuihJ4zGBEwkyVXXf2xsecMWCAGPWPDL0/fBfY9krNfC5M2sqxey2ShFIq+R/wMdaI7yVjUCF2QIUNiIdFbJL6bDrDyHnEXJJN+rAo23jUoTZZRv7Jq3DB/A5H7a73VCcblZyUmwMSlpg3wos7pdw5Ctta3zQPoxoAKGS1uZ+yTeZbPMmdbw== contact@ingolf-wagner.de"
];
# This value determines the NixOS release from which the default
# settings for stateful data, like file locations and database versions
# on your system were taken. It's perfectly fine and recommended to leave
# this value at the release version of the first install of this system.
# Before changing this value read the documentation for this option
# (e.g. man configuration.nix or on https://nixos.org/nixos/options.html).
system.stateVersion = "22.11"; # Did you read the comment?
}

View file

@ -0,0 +1,14 @@
# To Fix Windows Issues
## reset password
- use `sfdisk -l` to list partitions
- find the partition which holds `Windows/System32/config`
- mount it `mkdir -p /media/sda2; mount /dev/sda2 /media/sda2`
- `cd /media/sda2/Windows/System32/config`
- list SAM database `chntpw -l SAM`
- edit SAM database `chntpw -i SAM`
- reboot
More details [here](https://opensource.com/article/18/3/how-reset-windows-password-linux).
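The steps above can also be baked into the live image as a helper script; a hedged sketch in the style of this repo (the script name and the device path `/dev/sda2` are illustrative, adjust them to your disk layout):
```
{ pkgs, ... }:
{
  environment.systemPackages = [
    (pkgs.writeShellScriptBin "reset-windows-password" ''
      # mount the Windows system partition (adjust the device!)
      mkdir -p /media/sda2
      mount /dev/sda2 /media/sda2
      cd /media/sda2/Windows/System32/config
      # list accounts, then edit the SAM database interactively
      ${pkgs.chntpw}/bin/chntpw -l SAM
      ${pkgs.chntpw}/bin/chntpw -i SAM
    '')
  ];
}
```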

View file

@ -0,0 +1,51 @@
# NixOS livesystem to reset windows passwords
# Step by step guide : https://opensource.com/article/18/3/how-reset-windows-password-linux
# $ nixos-generate -f iso -c config.nix
{ pkgs, ... }:
let
wifi = {
ssid = "";
plainTextPassword = "";
};
remote-access = import ../lib/remote-access.nix {
publicSshKey = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC6uza62+Go9sBFs3XZE2OkugBv9PJ7Yv8ebCskE5WYPcahMZIKkQw+zkGI8EGzOPJhQEv2xk+XBf2VOzj0Fto4nh8X5+Llb1nM+YxQPk1SVlwbNAlhh24L1w2vKtBtMy277MF4EP+caGceYP6gki5+DzlPUSdFSAEFFWgN1WPkiyUii15Xi3QuCMR8F18dbwVUYbT11vwNhdiAXWphrQG+yPguALBGR+21JM6fffOln3BhoDUp2poVc5Qe2EBuUbRUV3/fOU4HwWVKZ7KCFvLZBSVFutXCj5HuNWJ5T3RuuxJSmY5lYuFZx9gD+n+DAEJt30iXWcaJlmUqQB5awcB1S2d9pJ141V4vjiCMKUJHIdspFrI23rFNYD9k2ZXDA8VOnQE33BzmgF9xOVh6qr4G0oEpsNqJoKybVTUeSyl4+ifzdQANouvySgLJV/pcqaxX1srSDIUlcM2vDMWAs3ryCa0aAlmAVZIHgRhh6wa+IXW8gIYt+5biPWUuihJ4zGBEwkyVXXf2xsecMWCAGPWPDL0/fBfY9krNfC5M2sqxey2ShFIq+R/wMdaI7yVjUCF2QIUNiIdFbJL6bDrDyHnEXJJN+rAo23jUoTZZRv7Jq3DB/A5H7a73VCcblZyUmwMSlpg3wos7pdw5Ctta3zQPoxoAKGS1uZ+yTeZbPMmdbw==";
hiddenReceiver = "";
};
in
{
imports = [ remote-access ];
environment.systemPackages = [
pkgs.chntpw
pkgs.ntfs3g
];
networking.dhcpcd.enable = true;
networking.wireless = {
enable = true;
networks."${wifi.ssid}".psk = wifi.plainTextPassword;
};
environment.extraInit = ''
# use vi shortcuts
# ----------------
set -o vi
EDITOR=vim
'';
services.xserver = {
enable = true;
displayManager.auto.enable = true;
desktopManager = {
default = "xfce";
xterm.enable = false;
xfce.enable = true;
xfce.extraSessionCommands = ''
${pkgs.midori}/bin/midori https://opensource.com/article/18/3/how-reset-windows-password-linux &
${pkgs.xfce.terminal}/bin/xfce4-terminal &
'';
};
};
}

images/yubikey-image.nix Normal file
View file

@ -0,0 +1,75 @@
# NixOS livesystem to generate yubikeys in an air-gapped manner
# screenshot: https://dl.thalheim.io/wmxIqucOEo2xuLk0Ut45fQ/yubikey-live-system.png
# $ nix-shell -p nixos-generators --run "nixos-generate -f iso -c yubikey-image.nix"
{ pkgs, ... }:
let
guide = pkgs.stdenv.mkDerivation {
name = "yubikey-guide-2019-01-21.html";
src = pkgs.fetchFromGitHub {
owner = "drduh";
repo = "YubiKey-Guide";
rev = "035d98ebbed54a0218ccbf23905054d32f97508e";
sha256 = "0rzy06a5xgfjpaklxdgrxml24d0vhk78lb577l3z4x7a2p32dbyq";
};
buildInputs = [ pkgs.pandoc ];
installPhase = "pandoc --highlight-style pygments -s --toc README.md -o $out";
};
in
{
environment.interactiveShellInit = ''
export GNUPGHOME=/run/user/$(id -u)/gnupghome
if [ ! -d $GNUPGHOME ]; then
mkdir $GNUPGHOME
fi
cp ${
pkgs.fetchurl {
url = "https://raw.githubusercontent.com/drduh/config/662c16404eef04f506a6a208f1253fee2f4895d9/gpg.conf";
sha256 = "118fmrsn28fz629y7wwwcx7r1wfn59h3mqz1snyhf8b5yh0sb8la";
}
} "$GNUPGHOME/gpg.conf"
echo "\$GNUPGHOME has been set up for you. Generated keys will be in $GNUPGHOME."
'';
environment.systemPackages = with pkgs; [
yubikey-personalization
yubikey-personalization-gui
yubikey-manager
yubikey-manager-qt
cryptsetup
pwgen
midori
paperkey
gnupg
ctmg
];
services.udev.packages = with pkgs; [ yubikey-personalization ];
services.pcscd.enable = true;
users.extraUsers.root.initialHashedPassword = "";
# make sure we are air-gapped
networking.wireless.enable = false;
networking.dhcpcd.enable = false;
services.getty.helpLine = "The 'root' account has an empty password.";
services.displayManager = {
defaultSession = "xfce";
autoLogin = {
enable = true;
user = "root";
};
};
services.xserver = {
enable = true;
desktopManager = {
xterm.enable = false;
xfce.enable = true;
};
displayManager = {
sessionCommands = ''
${pkgs.midori}/bin/midori ${guide} &
'';
};
};
}

View file

@ -20,15 +20,10 @@
./37c3.nix ./37c3.nix
./topology.nix ./topology.nix
./ssh-chungus.nix
./ssh-cherry.nix
./ferdium.nix
]; ];
time.timeZone = lib.mkForce "Asia/Bangkok"; #time.timeZone = lib.mkForce "Asia/Bangkok";
#time.timeZone = lib.mkForce "Asia/Tokyo"; time.timeZone = lib.mkForce "Asia/Tokyo";
#clan.core.facts.services = #clan.core.facts.services =
# let # let
@ -59,8 +54,9 @@
components.network.wifi.enable = true; components.network.wifi.enable = true;
components.terminal.enable = true; components.terminal.enable = true;
telemetry.enable = true; components.monitor.enable = true;
telemetry.opentelemetry.exporter.endpoint = "10.100.0.1:4317"; # orbi components.monitor.opentelemetry.exporter.endpoint = "10.100.0.1:4317"; # orbi
#components.monitor.opentelemetry.exporter.debug = "logs";
home-manager.users.mainUser.home.sessionPath = [ "$HOME/.timewarrior/scripts" ]; home-manager.users.mainUser.home.sessionPath = [ "$HOME/.timewarrior/scripts" ];
# todo: move to homemanager # todo: move to homemanager
@ -70,20 +66,24 @@
"terranix" "terranix"
"my_github" "my_github"
"logseq" "logseq"
#"trello" # todo make it work #"nextcloud-deck"
]; ];
log_level = "INFO"; log_level = "INFO";
static_fields = [ "priority" ]; static_fields = [ "priority" ];
merge_annotations = false; merge_annotations = false;
taskrc = pkgs.writeText "taskrc" ''
data.location=$HOME/.bugwarrior
'';
}; };
trello = { nextcloud-deck = {
service = "trello"; service = "deck";
token = "@oracle:eval:${pkgs.pass}/bin/pass show bugwarrior/trello/token"; base_uri = "https://nextcloud.ingolf-wagner.de";
add_tags = "bugwarrior_pull,trello"; username = "palo";
password = "@oracle:eval:${pkgs.pass}/bin/pass show bugwarrior/nextcloud-deck/palo";
}; };
logseq = { logseq = {
service = "logseq"; service = "logseq";
add_tags = "bugwarrior_pull,logseq"; add_tags = "bugwarrior,logseq";
description_template = "{{logseqtitle}}"; description_template = "{{logseqtitle}}";
}; };
terranix = { terranix = {
@ -93,31 +93,29 @@
username = "mrVanDalo"; username = "mrVanDalo";
default_priority = ""; default_priority = "";
description_template = "{{githubtitle}} {{githuburl}}"; description_template = "{{githubtitle}} {{githuburl}}";
add_tags = "bugwarrior_pull,github"; add_tags = "bugwarrior,github";
project_template = "terranix"; project_template = "terranix";
involved_issues = true; involved_issues = true;
query = "org:terranix is:open"; query = "org:terranix is:open";
include_user_issues = false; include_user_issues = false;
include_user_repos = false; include_user_repos = false;
}; };
# todo: add gmail
# todo: add trello
my_github = { my_github = {
service = "github"; service = "github";
login = "mrVanDalo"; login = "mrVanDalo";
token = "@oracle:eval:${pkgs.pass}/bin/pass development/github/mrVanDalo/bugwarriorAccessToken"; token = "@oracle:eval:${pkgs.pass}/bin/pass development/github/mrVanDalo/bugwarriorAccessToken";
username = "mrVanDalo"; username = "mrVanDalo";
description_template = "{{githubtitle}} {{githuburl}}"; description_template = "{{githubtitle}} {{githuburl}}";
add_tags = "bugwarrior_pull,github"; add_tags = "bugwarrior,github";
include_user_issues = true; include_user_issues = true;
include_user_repos = true; include_user_repos = true;
exclude_repos = [ exclude_repos = [
"azubi" "azubi"
"csv-to-qif" "csv-to-qif"
"stepp0r" "stepp0r"
"nix-shell-mix"
]; ];
}; };
# todo : add github issues
}; };
users.users.mainUser.extraGroups = [ "pipewire" ]; users.users.mainUser.extraGroups = [ "pipewire" ];

View file

@ -1 +0,0 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINjJvuEviWlnptuKqA8MQ3QVVdvEGaez1VmShaj56QTg root@cherry

View file

@ -1 +0,0 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDhrmPLOY18azllQEsK+je42aaqnpHm0k3f0bjQEnQXW palo@cherry

View file

@ -1,49 +0,0 @@
{
config,
pkgs,
lib,
...
}:
with lib;
let
ferdium = pkgs.writeShellScriptBin "ferdium" ''
/var/run/wrappers/bin/sudo -u ferdium -i ${pkgs.ferdium}/bin/ferdium $@
'';
in
{
environment.systemPackages = [
ferdium
(pkgs.makeDesktopItem {
terminal = false;
exec = "${ferdium}/bin/ferdium";
name = "ferdium";
desktopName = "Ferdium";
icon = "${pkgs.ferdium}/share/icons/hicolor/512x512/apps/ferdium.png";
})
pkgs.xorg.xhost
];
users.users.ferdium = {
isNormalUser = false;
isSystemUser = true;
home = "/home/ferdium";
createHome = true;
extraGroups = [
"audio"
"input"
"video"
"pipewire"
];
group = "ferdium";
shell = pkgs.bashInteractive;
};
users.groups.ferdium = { };
security.sudo.extraConfig = ''
${config.users.extraUsers.mainUser.name} ALL=(ferdium) NOPASSWD: ALL
'';
}

View file

@ -5,13 +5,17 @@
hardware.graphics.enable = true; hardware.graphics.enable = true;
hardware.graphics.extraPackages = with pkgs; [ hardware.graphics.extraPackages = with pkgs; [
intel-media-driver # LIBVA_DRIVER_NAME=iHD intel-media-driver # LIBVA_DRIVER_NAME=iHD
#intel-vaapi-driver # For older processors. LIBVA_DRIVER_NAME=i965 #vaapi-intel-hybrid
intel-vaapi-driver # For older processors. LIBVA_DRIVER_NAME=i965
#vaapiIntel # LIBVA_DRIVER_NAME=i965 (older but works better for Firefox/Chromium)
#vaapiVdpau
#libvdpau-va-gl
]; ];
#hardware.graphics.enable32Bit = true; hardware.graphics.enable32Bit = true;
#hardware.graphics.extraPackages32 = with pkgs.pkgsi686Linux; [ intel-vaapi-driver ]; hardware.graphics.extraPackages32 = with pkgs.pkgsi686Linux; [ intel-vaapi-driver ];
environment.systemPackages = [ environment.sessionVariables = {
pkgs.libva-utils # for CLI tools like : vainfo LIBVA_DRIVER_NAME = "i965";
]; }; # Optionally, set the environment variable
} }

View file

@ -1,18 +1,10 @@
{ pkgs, config, ... }: { config, ... }:
{ {
tinc.private.enable = true; tinc.private.enable = true;
tinc.private.ipv4 = "10.23.42.29"; tinc.private.ipv4 = "10.23.42.29";
healthchecks.localCommands.ping-private = pkgs.writers.writeBash "ping-private" ''
ping -c 1 -W 5 ${config.tinc.private.ipv4}
'';
tinc.secret.enable = true; tinc.secret.enable = true;
tinc.secret.ipv4 = "10.123.42.29"; tinc.secret.ipv4 = "10.123.42.29";
healthchecks.localCommands.ping-secret = pkgs.writers.writeBash "ping-secret" ''
ping -c 1 -W 5 ${config.tinc.secret.ipv4}
'';
} }

View file

@ -1,9 +1,4 @@
{ { config, factsGenerator, ... }:
config,
factsGenerator,
pkgs,
...
}:
{ {
clan.core.facts.services.tinc_retiolum = factsGenerator.tinc { name = "retiolum"; }; clan.core.facts.services.tinc_retiolum = factsGenerator.tinc { name = "retiolum"; };
@ -11,10 +6,6 @@
networking.retiolum.port = 720; networking.retiolum.port = 720;
networking.retiolum.nodename = "cherry"; networking.retiolum.nodename = "cherry";
healthchecks.localCommands.ping-retiolum = pkgs.writers.writeBash "ping-retiolum" ''
ping -c 1 -W 5 ${config.networking.retiolum.nodename}.r
'';
services.tinc.networks.retiolum = { services.tinc.networks.retiolum = {
ed25519PrivateKeyFile = ed25519PrivateKeyFile =
config.clan.core.facts.services.tinc_retiolum.secret."tinc.retiolum.ed25519_key.priv".path; config.clan.core.facts.services.tinc_retiolum.secret."tinc.retiolum.ed25519_key.priv".path;

View file

@ -1,16 +1,10 @@
{ {
pkgs,
config, config,
factsGenerator, factsGenerator,
clanLib, clanLib,
... ...
}: }:
{ {
healthchecks.localCommands.ping-wg0 = pkgs.writers.writeBash "ping-wg0" ''
ping -c 1 -W 5 ${config.clan.core.facts.services.wireguard_ip.public."wireguard.wg0.ip".value}
'';
networking.firewall.allowedUDPPorts = [ 51820 ]; networking.firewall.allowedUDPPorts = [ 51820 ];
clan.core.facts.services.wireguard = factsGenerator.wireguard { name = "wg0"; }; clan.core.facts.services.wireguard = factsGenerator.wireguard { name = "wg0"; };
clan.core.facts.services.wireguard_ip = factsGenerator.public { clan.core.facts.services.wireguard_ip = factsGenerator.public {

View file

@ -1,46 +0,0 @@
{
config,
factsGenerator,
lib,
...
}:
let
hostname = "cherry";
in
{
# Defines the root SSH key to be used exclusively for accessing a secure machine.
# The need for this arises because deployments using the 'clan' command-line tool (e.g. 'clan machines update')
# make use of the 'ssh -A' option, which forwards the SSH agent from the client to the target machine.
# If the target machine becomes compromised by an attacker,
# they could potentially leverage the forwarded SSH agent to access the secure machine.
# This file prevents that scenario by restricting access strictly to the defined SSH key,
# which is only used to access the secure machine, so no other ssh-agent will contain this ssh key
clan.core.facts.services."ssh.root.${hostname}" = factsGenerator.ssh {
name = "root.${hostname}";
};
systemd.tmpfiles.settings.mainUser = {
"/run/facts/ssh.root.${hostname}.id_ed25519"."C+" = {
user = config.users.users.mainUser.name;
group = config.users.users.mainUser.group;
mode = "400";
argument =
config.clan.core.facts.services."ssh.root.${hostname}".secret."ssh.root.${hostname}.id_ed25519".path;
};
};
home-manager.users.mainUser.programs.ssh.matchBlocks =
lib.genAttrs
[
"${hostname}.bear"
"${hostname}.private"
"${hostname}.wg0"
]
(name: {
identityFile = "/run/facts/ssh.root.${hostname}.id_ed25519";
identitiesOnly = true;
});
}

View file

@ -1,46 +0,0 @@
{
config,
factsGenerator,
lib,
...
}:
let
hostname = "chungus";
in
{
# Defines the root SSH key to be used exclusively for accessing a secure machine.
# The need for this arises because deployments using the 'clan' command-line tool (e.g. 'clan machines update')
# make use of the 'ssh -A' option, which forwards the SSH agent from the client to the target machine.
# If the target machine becomes compromised by an attacker,
# they could potentially leverage the forwarded SSH agent to access the secure machine.
# This file prevents that scenario by restricting access strictly to the defined SSH key,
# which is only used to access the secure machine, so no other ssh-agent will contain this ssh key
clan.core.facts.services."ssh.root.${hostname}" = factsGenerator.ssh {
name = "root.${hostname}";
};
systemd.tmpfiles.settings.mainUser = {
"/run/facts/ssh.root.${hostname}.id_ed25519"."C+" = {
user = config.users.users.mainUser.name;
group = config.users.users.mainUser.group;
mode = "400";
argument =
config.clan.core.facts.services."ssh.root.${hostname}".secret."ssh.root.${hostname}.id_ed25519".path;
};
};
home-manager.users.mainUser.programs.ssh.matchBlocks =
lib.genAttrs
[
"${hostname}.bear"
"${hostname}.private"
"${hostname}.wg0"
]
(name: {
identityFile = "/run/facts/ssh.root.${hostname}.id_ed25519";
identitiesOnly = true;
});
}

View file

@ -6,7 +6,7 @@
}: }:
{ {
healthchecks.http.syncthing-gui = { verify.http.syncthing-gui = {
url = config.services.syncthing.guiAddress; url = config.services.syncthing.guiAddress;
expectedContent = "syncthing"; expectedContent = "syncthing";
}; };

View file

@ -46,9 +46,6 @@
./service-atuin.nix ./service-atuin.nix
./service-forgejo.nix ./service-forgejo.nix
./service-paperless-backup.nix
./service-paperless-healthchecks.nix
./service-paperless-tika.nix
./service-paperless.nix ./service-paperless.nix
./service-s3.nix ./service-s3.nix
#./service-taskwarrior.nix #./service-taskwarrior.nix
@ -71,14 +68,11 @@
features.boot.ssh.kernelModules = [ "e1000e" ]; features.boot.ssh.kernelModules = [ "e1000e" ];
features.boot.tor.enable = true; features.boot.tor.enable = true;
telemetry.enable = true; components.monitor.enable = true;
telemetry.opentelemetry.receiver.endpoint = "0.0.0.0:4317"; components.monitor.opentelemetry.receiver.endpoint = "0.0.0.0:4317";
networking.firewall.interfaces.wg0.allowedTCPPorts = [ 4317 ]; networking.firewall.interfaces.wg0.allowedTCPPorts = [ 4317 ];
networking.firewall.interfaces.wg0.allowedUDPPorts = [ 4317 ]; networking.firewall.interfaces.wg0.allowedUDPPorts = [ 4317 ];
healthchecks.closed.wg0.host = "10.100.0.2";
healthchecks.closed.retiolum.host = "centauri.r";
services.printing.enable = false; services.printing.enable = false;
networking.hostName = "chungus"; networking.hostName = "chungus";

View file

@ -1 +0,0 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIH5cHogtLN70T5g7b30r2RR4l6TEFB4t8O8FZ+NMUTfj paperless@chungus

View file

@ -1 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJrcZBnx2h1cOlpHImuYhOu08gUdchzbKwbmOMUd54f3 rbackup@chungus ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJrcZBnx2h1cOlpHImuYhOu08gUdchzbKwbmOMUd54f3 nixbld@cream

View file

@ -1 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJHGxMlaCoAyD/lIYAxEts7vyWYJ7ut4P9Cjw7mvAPiL syncoid@chungus ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJHGxMlaCoAyD/lIYAxEts7vyWYJ7ut4P9Cjw7mvAPiL nixbld@cream

View file

@ -0,0 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICrdJ4EXJ0HeZXTb4AzRKQeAORBWwcawOxj4EJhV62De nixbld@cherry

View file

@ -16,5 +16,4 @@
networking.firewall.interfaces.enp0s31f6.allowedTCPPorts = [ 1883 ]; networking.firewall.interfaces.enp0s31f6.allowedTCPPorts = [ 1883 ];
networking.firewall.interfaces.wg0.allowedTCPPorts = [ 1883 ]; networking.firewall.interfaces.wg0.allowedTCPPorts = [ 1883 ];
healthchecks.closed.retiolum.ports.mqtt = [ 1883 ];
} }

View file

@ -8,11 +8,6 @@
{ {
imports = [ ./hass-mqtt.nix ]; imports = [ ./hass-mqtt.nix ];
healthchecks.closed.retiolum.ports.zigbee2mqtt = [
1337
9666
];
services.zigbee2mqtt = { services.zigbee2mqtt = {
enable = true; enable = true;
dataDir = "/srv2/zigbee2mqtt"; dataDir = "/srv2/zigbee2mqtt";

View file

@ -18,6 +18,4 @@
networking.firewall.interfaces.wg0.allowedTCPPorts = [ 8123 ]; networking.firewall.interfaces.wg0.allowedTCPPorts = [ 8123 ];
networking.firewall.interfaces.wg0.allowedUDPPorts = [ 8123 ]; networking.firewall.interfaces.wg0.allowedUDPPorts = [ 8123 ];
healthchecks.closed.retiolum.ports.hass = [ 8123 ];
} }

View file

@ -12,11 +12,6 @@
user = "media"; user = "media";
}; };
healthchecks.http.jellyfin = {
url = "flix.${config.networking.hostName}.private";
};
healthchecks.closed.retiolum.ports.jellyfin = [ 8096 ];
services.nginx = { services.nginx = {
enable = true; enable = true;
virtualHosts."flix.${config.networking.hostName}.private" = { virtualHosts."flix.${config.networking.hostName}.private" = {

View file

@ -6,13 +6,6 @@
}: }:
{ {
healthchecks.http.navidrome = {
url = "${config.networking.hostName}.wg0:${toString config.services.navidrome.settings.Port}/app/#/login";
expectedContent = "Navidrome";
};
healthchecks.closed.retiolum.ports.navidrome = [ config.services.navidrome.settings.Port ];
services.navidrome = { services.navidrome = {
enable = true; enable = true;
openFirewall = true; openFirewall = true;
@ -22,24 +15,4 @@
settings.MusicFolder = "/media/arr/lidarr"; settings.MusicFolder = "/media/arr/lidarr";
}; };
services.nginx = {
enable = true;
virtualHosts."music.${config.networking.hostName}.private" = {
serverAliases = [
"music.${config.networking.hostName}.wg0"
"music.ingolf-wagner.de"
];
locations."/" = {
recommendedProxySettings = true;
proxyWebsockets = true;
proxyPass = "http://localhost:${toString config.services.navidrome.settings.Port}";
extraConfig = ''
allow ${config.tinc.private.subnet};
allow ${config.wireguard.wg0.subnet};
deny all;
'';
};
};
};
} }

View file

@ -1,12 +1,6 @@
{ config, ... }: { config, ... }:
{ {
healthchecks.closed.retiolum.ports.share = [
137
138
139
445
];
networking.firewall.interfaces.enp0s31f6.allowedTCPPorts = [ networking.firewall.interfaces.enp0s31f6.allowedTCPPorts = [
445 445
139 139

View file

@ -6,11 +6,10 @@
}: }:
{ {
healthchecks.http.syncthing-gui = { verify.http.syncthing-gui = {
url = config.services.syncthing.guiAddress; url = config.services.syncthing.guiAddress;
expectedContent = "syncthing"; expectedContent = "syncthing";
}; };
healthchecks.closed.retiolum.ports.syncthing-gui = [ 8384 ];
services.syncthing = { services.syncthing = {
enable = true; enable = true;

View file

@ -1,18 +1,9 @@
{ { config, factsGenerator, ... }:
config,
factsGenerator,
pkgs,
...
}:
{ {
clan.core.facts.services.tinc_retiolum = factsGenerator.tinc { name = "retiolum"; }; clan.core.facts.services.tinc_retiolum = factsGenerator.tinc { name = "retiolum"; };
networking.retiolum.port = 720; networking.retiolum.port = 720;
networking.retiolum.nodename = "chungus"; networking.retiolum.nodename = "centauri";
healthchecks.localCommands.ping-retiolum = pkgs.writers.writeBash "ping-retiolum" ''
ping -c 1 -W 5 ${config.networking.retiolum.nodename}.r
'';
services.tinc.networks.retiolum = { services.tinc.networks.retiolum = {
ed25519PrivateKeyFile = ed25519PrivateKeyFile =

View file

@ -1,11 +1,6 @@
{ pkgs, config, ... }:
{ {
tinc.private.enable = true; tinc.private.enable = true;
tinc.private.ipv4 = "10.23.42.28"; tinc.private.ipv4 = "10.23.42.28";
healthchecks.localCommands.ping-private = pkgs.writers.writeBash "ping-private" ''
ping -c 1 -W 5 ${config.tinc.private.ipv4}
'';
} }

View file

@ -1,16 +1,10 @@
{ {
pkgs,
config, config,
factsGenerator, factsGenerator,
clanLib, clanLib,
... ...
}: }:
{ {
healthchecks.localCommands.ping-wg0 = pkgs.writers.writeBash "ping-wg0" ''
ping -c 1 -W 5 ${config.clan.core.facts.services.wireguard_ip.public."wireguard.wg0.ip".value}
'';
networking.firewall.allowedUDPPorts = [ 51820 ]; networking.firewall.allowedUDPPorts = [ 51820 ];
clan.core.facts.services.wireguard = factsGenerator.wireguard { name = "wg0"; }; clan.core.facts.services.wireguard = factsGenerator.wireguard { name = "wg0"; };
clan.core.facts.services.wireguard_ip = factsGenerator.public { clan.core.facts.services.wireguard_ip = factsGenerator.public {

View file

@ -1,13 +1,10 @@
{ {
pkgs,
config, config,
pkgs,
assets, assets,
... ...
}: }:
{ {
healthchecks.closed.retiolum.ports.atuin = [ config.services.atuin.port ];
services.atuin = { services.atuin = {
enable = true; enable = true;
package = pkgs.legacy_2405.atuin.overrideAttrs (_old: { package = pkgs.legacy_2405.atuin.overrideAttrs (_old: {

View file

@ -5,13 +5,11 @@
... ...
}: }:
{ {
healthchecks.http.forgejo = { verify.http.forgejo = {
url = "http://git.chungus.private/explore/repos"; url = "http://git.chungus.private/explore/repos";
expectedContent = "nixinate"; expectedContent = "nixinate";
}; };
healthchecks.closed.retiolum.ports.forgejo = [ config.services.forgejo.settings.server.HTTP_PORT ];
services.nginx = { services.nginx = {
enable = true; enable = true;
statusPage = true; statusPage = true;

View file

@ -1,40 +0,0 @@
{
config,
pkgs,
lib,
factsGenerator,
...
}:
{
clan.core.facts.services."paperless-ngx.borg" = factsGenerator.password { name = "borgbackup"; };
clan.core.facts.services."paperless-ngx.ssh" = factsGenerator.ssh { name = "paperless-ngx"; };
# backup
services.borgbackup.jobs."paperless-ngx" = {
paths = [ config.services.paperless.dataDir ];
repo = "root@orbi.bear:borg-${config.networking.hostName}-paperless";
compression = "auto,lzma";
startAt = "daily";
encryption = {
mode = "keyfile-blake2";
passCommand = "cat ${
toString config.clan.core.facts.services."paperless-ngx.borg".secret."password.borgbackup".path
}";
};
environment = {
BORG_RSH = "ssh -i ${
toString
config.clan.core.facts.services."paperless-ngx.ssh".secret."ssh.paperless-ngx.id_ed25519".path
}";
BORG_RELOCATED_REPO_ACCESS_IS_OK = "yes";
};
prune.keep = {
within = "3d"; # Keep all backups from the last 3 days.
weekly = 2; # Keep 2 additional end-of-week archives.
monthly = -1; # Keep end of month archive for every month
};
doInit = true;
};
}

View file

@ -1,16 +0,0 @@
{
config,
pkgs,
lib,
factsGenerator,
...
}:
{
healthchecks.http.paperless = {
url = "http://paperless.ingolf-wagner.de/accounts/login/?next=/";
expectedContent = "paperless.chungus.private";
};
healthchecks.closed.retiolum.ports.paperless = [ config.services.paperless.port ];
}

View file

@ -1,36 +0,0 @@
{
config,
pkgs,
lib,
...
}:
{
services.paperless = {
settings = {
PAPERLESS_TIKA_ENABLED = true;
PAPERLESS_TIKA_ENDPOINT = "http://127.0.0.1:${toString config.services.tika.port}";
PAPERLESS_TIKA_GOTENBERG_ENDPOINT = "http://127.0.0.1:${toString config.services.gotenberg.port}";
};
};
services.tika = {
enable = true;
};
services.gotenberg = {
enable = true;
timeout = "300s";
port = 3214;
};
systemd.services.gotenberg = {
environment.HOME = "/run/gotenberg";
serviceConfig = {
SystemCallFilter = lib.mkAfter [ "@chown" ]; # TODO remove when fixed (https://github.com/NixOS/nixpkgs/issues/349123)
WorkingDirectory = "/run/gotenberg";
RuntimeDirectory = "gotenberg";
};
};
}

View file

@ -2,6 +2,7 @@
config, config,
pkgs, pkgs,
lib, lib,
nixos-artwork,
... ...
}: }:
{ {
@ -9,15 +10,6 @@
services.paperless = { services.paperless = {
enable = true; enable = true;
address = "0.0.0.0"; address = "0.0.0.0";
package = pkgs.paperless-ngx.overrideAttrs (old: rec {
version = "2.13.4";
src = pkgs.fetchFromGitHub {
owner = "paperless-ngx";
repo = "paperless-ngx";
rev = "refs/tags/v${version}";
hash = "sha256-db8omhyngvenAgfGGpMAhGkgqGug/sv7AL1G+sniM/c=";
};
});
settings = { settings = {
PAPERLESS_OCR_LANGUAGE = "deu+eng"; PAPERLESS_OCR_LANGUAGE = "deu+eng";
PAPERLESS_APP_TITLE = "paperless.chungus.private"; PAPERLESS_APP_TITLE = "paperless.chungus.private";
@ -26,12 +18,7 @@
"desktop.ini" "desktop.ini"
]; ];
PAPERLESS_EMAIL_TASK_CRON = "0 */8 * * *"; # “At minute 0 past every 8th hour.” PAPERLESS_EMAIL_TASK_CRON = "0 */8 * * *"; # “At minute 0 past every 8th hour.”
#PAPERLESS_CONSUMER_DELETE_DUPLICATES = false;
# https://github.com/paperless-ngx/paperless-ngx/discussions/4047#discussioncomment-7019544
# https://github.com/paperless-ngx/paperless-ngx/issues/7383
PAPERLESS_OCR_USER_ARGS = {
"invalidate_digital_signatures" = true;
};
}; };
}; };
@ -43,6 +30,10 @@
}; };
networking.firewall.interfaces.wg0.allowedTCPPorts = [ config.services.paperless.port ]; networking.firewall.interfaces.wg0.allowedTCPPorts = [ config.services.paperless.port ];
verify.http.paperless = {
url = "http://paperless.ingolf-wagner.de/accounts/login/?next=/";
expectedContent = "paperless.chungus.private";
};
services.nginx.virtualHosts."paperless.${config.networking.hostName}.private" = { services.nginx.virtualHosts."paperless.${config.networking.hostName}.private" = {
serverAliases = [ "paperless.ingolf-wagner.de" ]; serverAliases = [ "paperless.ingolf-wagner.de" ];
@ -59,5 +50,4 @@
proxyWebsockets = true; proxyWebsockets = true;
}; };
}; };
} }

View file

@ -1,16 +1,6 @@
{ config, factsGenerator, ... }: { config, factsGenerator, ... }:
{ {
healthchecks.closed.retiolum.ports.s3 = [
9000
9001
];
healthchecks.http.s3 = {
url = "http://s3.chungus.private:9001/login";
expectedContent = "minio";
};
clan.core.facts.services.s3 = factsGenerator.password { clan.core.facts.services.s3 = factsGenerator.password {
name = "root"; name = "root";
service = "s3"; service = "s3";
@ -19,9 +9,6 @@
services.minio = { services.minio = {
enable = true; enable = true;
region = "home"; region = "home";
# this file was adjusted right after creation
# MINIO_ROOT_USER=root
# MINIO_ROOT_PASSWORD=<the password>
rootCredentialsFile = config.clan.core.facts.services.s3.secret."s3.root".path; rootCredentialsFile = config.clan.core.facts.services.s3.secret."s3.root".path;
}; };

View file

@ -6,8 +6,6 @@
}: }:
{ {
healthchecks.closed.retiolum.ports.taskwarrior-webui = [ 8080 ];
virtualisation.oci-containers = { virtualisation.oci-containers = {
containers.taskwarrior-webui = { containers.taskwarrior-webui = {
volumes = [ volumes = [

View file

@ -1,8 +1,5 @@
{ pkgs, ... }: { pkgs, ... }:
{ {
healthchecks.closed.retiolum.ports.vault = [ 8200 ];
services.vault = { services.vault = {
enable = true; enable = true;
#adress = "chungus.private:8200"; #adress = "chungus.private:8200";

View file

@ -57,11 +57,6 @@ in
source = "root@orbi.${tld}:zroot/taskchampion"; source = "root@orbi.${tld}:zroot/taskchampion";
target = "zraid/mirror/taskchampion"; # should NOT be created up front! target = "zraid/mirror/taskchampion"; # should NOT be created up front!
}; };
commands.forgejo = {
sshKey = "/run/facts/ssh.syncoid.id_ed25519";
source = "root@orbi.${tld}:zroot/forgejo";
target = "zraid/mirror/forgejo"; # should NOT be created up front!
};
commonArgs = [ commonArgs = [
# Does not create new snapshot, only transfers existing # Does not create new snapshot, only transfers existing
"--no-sync-snap" "--no-sync-snap"

View file

@ -12,10 +12,6 @@
}; };
}; };
healthchecks.http.grafana = {
url = "grafana.${config.networking.hostName}.private";
};
services.grafana = { services.grafana = {
enable = true; enable = true;
settings = { settings = {

View file

@ -21,9 +21,6 @@ let
''; '';
in in
{ {
healthchecks.closed.retiolum.ports.vault = [ 9993 ];
environment.systemPackages = [ environment.systemPackages = [
(zerotierCommand "zerotier-script-members" '' (zerotierCommand "zerotier-script-members" ''
curl "http://localhost:9993/controller/network/''${NWID}/member" -H "X-ZT1-AUTH: ''${TOKEN}" | gojq curl "http://localhost:9993/controller/network/''${NWID}/member" -H "X-ZT1-AUTH: ''${TOKEN}" | gojq

View file

@ -0,0 +1,137 @@
{
config,
pkgs,
lib,
...
}:
{
imports = [
./hardware-configuration.nix
./syncthing.nix
./network-tinc.nix
./network-tinc_retiolum.nix
./network-wireguard-wg0.nix
./network-wireguard-wg1.nix
];
system.stateVersion = "22.11";
# Use the systemd-boot EFI boot loader, not grub
boot.loader.systemd-boot.enable = true;
boot.loader.efi.canTouchEfiVariables = true;
boot.tmp.useTmpfs = true; # make /tmp a tmpfs (performance!)
components.virtualisation.enable = true;
components.gui.enable = true;
components.gui.xorg.enable = true;
components.gui.wayland.enable = false;
components.mainUser.enable = true;
components.media.enable = true;
components.media.tts-client.enable = false;
components.network.enable = true;
components.network.wifi.enable = true;
components.terminal.enable = true;
components.monitor.enable = true;
components.monitor.opentelemetry.exporter.endpoint = "10.100.0.1:4317"; # orbi
components.monitor.exporters.zfs.enable = false;
home-manager.users.mainUser.home.sessionPath = [ "$HOME/.timewarrior/scripts" ];
home-manager.users.mainUser.bugwarrior.config = {
general = {
targets = [
"terranix"
"my_github"
];
log_level = "INFO";
static_fields = [ "priority" ];
merge_annotations = false;
};
terranix = {
service = "github";
login = "mrVanDalo";
token = "@oracle:eval:${pkgs.pass}/bin/pass development/github/mrVanDalo/bugwarriorAccessToken";
username = "mrVanDalo";
default_priority = "";
description_template = "{{githubtitle}} {{githuburl}}";
add_tags = "github";
project_template = "terranix";
involved_issues = true;
query = "org:terranix is:open";
include_user_issues = false;
include_user_repos = false;
};
my_github = {
service = "github";
login = "mrVanDalo";
token = "@oracle:eval:${pkgs.pass}/bin/pass development/github/mrVanDalo/bugwarriorAccessToken";
username = "mrVanDalo";
description_template = "{{githubtitle}} {{githuburl}}";
add_tags = "github";
include_user_issues = true;
include_user_repos = true;
exclude_repos = [
"azubi"
"csv-to-qif"
"stepp0r"
];
};
# todo : add github issues
};
users.users.mainUser.extraGroups = [ "pipewire" ];
services.nginx.enable = true;
networking.hostName = "cream";
#services.flatpak.enable = true;
# charge the battery in a way that prolongs its lifespan
services.power-profiles-daemon.enable = false;
services.tlp = {
enable = true;
settings = {
CPU_BOOST_ON_BAT = 0;
CPU_SCALING_GOVERNOR_ON_BAT = "powersave";
START_CHARGE_THRESH_BAT0 = 30;
STOP_CHARGE_THRESH_BAT0 = 85;
RUNTIME_PM_ON_BAT = "auto";
};
};
security.wrappers = {
pmount = {
source = "${pkgs.pmount}/bin/pmount";
setuid = true;
owner = "root";
group = "root";
};
pumount = {
source = "${pkgs.pmount}/bin/pumount";
setuid = true;
owner = "root";
group = "root";
};
};
services.printing.enable = true;
samba-share = {
enable = false;
folders = {
share = "/home/share";
video = "/home/video-material";
};
};
# for congress and streaming
hardware.graphics.enable = true;
}

View file

@ -0,0 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICPz1SRSthwDEmXZXcBMi0FZhqgZxF7i1lDcGT534Gy7 nixbld@cherry

View file

@ -0,0 +1 @@
ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIArokGctZ2VLf92FhfE8pHzkx/bjz0/J1QjeaGgDSj1s ingolf.wagner@jobrad.org

View file

@ -0,0 +1 @@
VQSHJ6K-MUWCTPJ-LJINXBP-7O244YK-TIY3D5B-T6PU7BY-2NPWPXI-HO2Q5Q6

View file

@ -0,0 +1 @@
b8xU34/kYj3LxYfdrozDnpmXt25mLbYsnhUxgvFz2CG

View file

@ -0,0 +1,13 @@
-----BEGIN RSA PUBLIC KEY-----
MIICCgKCAgEAqFNvj8lg1ET9rala1W7RSi+ObQoN8JoQ7fTZ63XBycDr3bEkubGk
vIbLFFsdhIiMrJG6eRr25EiKQxew6Pb4HwwqjCJugHzSELHgiWN93Dx5hgl+EXV2
8EYQ3xWO+8ZH4PQsfUMqxBx553UMOiDZ0L4OE275z5XuLyDXnjXqv2WCU7qY57lt
MlJ3BFOhtWz7wl7fOu8rzalVuDLc/yp3KKhzLxr7lUUIHOZOT8EsoSAOiy+qUq6Q
K9JrHcTGP3FmBucY5bSyVQxbX75tLqBiadTvlcx4n0mHTbCyHjC2tIHmN2MtUhsS
Qw4uITn7NTd/c9H89Le2Z3Z01sRNEo1eZ3ru0JlYqUEL0sE2lAtPJWRgRePEzCWs
s8GN6LFrAvl8T/FmW6XFzxGBViOhFqP61HO17KhALwl5kVXpUMFKxbn1/ZXP5Ono
+h/Aaph56D/EZAFVvAPR7xx/Cp+cjOvKaKLgnZ5vG3VrjmbL9KkDtHiiiHcKC/Z8
OrOirkxalJJd2bMYpIUO/7TYEUCQzni3ollYae3myFuwRIeiqNnVjtHiQnPMEYmn
pjgWmvtYjvPLJkpnnP96nn+FI7FXqro8nY59COaIne3m0SxPo6JrGwugvYuLeOJS
96v4hcSTrB3LEaH49a3vaFKQUsEOFCCTc6Qx+/ejgV/3cEzQjDblep8CAwEAAQ==
-----END RSA PUBLIC KEY-----

View file

@ -0,0 +1 @@
B3EKYRxqFjIGR2VYajjDqX0gltPJNwcno5PUhafKWKB

View file

@ -0,0 +1,13 @@
-----BEGIN RSA PUBLIC KEY-----
MIICCgKCAgEAnzhalF1rqLdSsT6HAGuQ6x1kC9Ty3FjoKR2Y5RCO9YIyEgRE8qfR
jkne+wIIleODUDMZYuvUe9X5hm8w6wDzxlwCPitwhDlOxoSBnXfbL6YL9rZBn3lC
JFkpEPtAJYnfM64R4/UjSndHlCVuH7tltD/1tmfG6IbSsIeDVz+pWZdEmBJfCiDl
aqP2gb1oIwe9TgJX2EC2ugW+6Jh9oPNIOP2Q5eLvty5WPhUSGQDWVMr5u0Rgc1oU
hhAvrfue7MFqUwX+o0Zq93eVAu/51dhTtqwwVgZVlHK7Wkak4yTRGPAP9v9vbKeK
7GpQuvbiI5OphhSFPjyCN1XMqVgFxqsnLsflIPbQdxCkBgFxhmNf31BDlXWHWD5e
7BfFYc1tZFcEWKhguoCSesJvh1BVsiZzfya96lGd/+ttcKBUKX4tdznEQsV/MVhC
cVnQD6k8PN4BIWVJtcq5oM9h6Yt6avtv8TeuaLp/Janco4JmYYFIfRETnz6ye/fG
OiKJnGQ1yohSE6n8ZUK1QYdYezZfI8QhF7GHK7he9x13L9xmXoybV+REXlRvh4S2
bi9lWTKhQVIHb/qLIdQuaAnK1xg4tdNzL43KEpPstGlAnG8uUNL8hCJL3m220RPK
lEbtLhayRzQ9zgj/hBQZa/hMGGyiqV1hiTbEEWAusJdGTUPYhjAelOkCAwEAAQ==
-----END RSA PUBLIC KEY-----

View file

@ -0,0 +1 @@
nrSEGYNGKiEdXaVAnGkb7ihBnKf/PcpGJEvn1NMLNoB

View file

@ -0,0 +1,13 @@
-----BEGIN RSA PUBLIC KEY-----
MIICCgKCAgEA8xuGW5yLty6aWYhhBK/T+7TmP3QsU2Y3ew7KvSNLhuxQc63CwzSA
eJpDHYgoLujoi6VGd1L+I7G3Imy0wF5FsFgsFKY7wTbSL/Y/6gc6wm7yL/gYebH7
zm//n6wqMSlrFKMpnWQj9x43f8eseMl0D3rlXYpE7HfKZI3sPTNexUrWRsqVFUFN
Jmi5SQHIWuczWh0EGUaSc8ueMYHh9WkzDHS7Y8UbLy7bSclRSPxIp7D87513n7YT
0OH7dEDD/is0uoRHQg+TpgFm9HcJeX5ULmsv1x6gssm7D7r+nXF7ATNJrKO0h78O
hAS7kfugHFzrYQP/NRxNLRETSuyL4kQS5WiVfdQWIi+UJtasCSPH4hT34DBPN8vX
GC0nneV9RztnTBUpuIH/BsBOmHBHwLTb9miN6dTyq1MAL/NsiO8+zgxE4gJnownR
r6Dn3fF2bGX9ij9/7WUyi9hez+3c5q3CsG0CDccDsvgkFc4nDdWxmwqKtIg4hM7x
M6FA5W9g1hgupcIdRt/+dKp+nwGH5TYAXa9+XFwfSuegds2hZFluEhmgfet2tB26
wA4w6+mNcTzikvU0262w9VvkvIhAXWxAvMFtDTOzY2aWqoYJfDTmdaRHdj8c2F7A
UCknUC9a3Kwi3BubAARtO1zTe6fhvkdAm9eJi985Y98xaHHXU6QeDX0CAwEAAQ==
-----END RSA PUBLIC KEY-----

View file

@ -0,0 +1 @@
10.100.0.6/32

View file

@ -0,0 +1 @@
10.100.0.6

View file

@ -0,0 +1 @@
u0HcEa3lGDxqGqrot+9AtrqQNqNzOtCv/PDuuZqB9Ek=

Some files were not shown because too many files have changed in this diff.