feature/hass
Ingolf Wagner 2021-11-01 19:30:41 +01:00
parent 95d70d5225
commit 973e3b3305
Signed by: palo
GPG Key ID: 76BF5F1928B9618B
27 changed files with 359 additions and 291 deletions


@@ -1,19 +1,23 @@
{
# cat ~/.ssh/id_rsa.pub
publicSshKey ? "",
# remote-install-get-hiddenReceiver
hiddenReceiver ? "", }:
# cat ~/.ssh/id_rsa.pub
publicSshKey ? ""
, # remote-install-get-hiddenReceiver
hiddenReceiver ? ""
,
}:
{ config, lib, pkgs, ... }: {
imports = [
{ # system setup
{
# system setup
networking.hostName = "liveos";
users.extraUsers = {
root = { openssh.authorizedKeys.keys = [ publicSshKey ]; };
};
}
{ # installed packages
{
# installed packages
nixpkgs.config.allowUnfree = true;
environment.systemPackages = with pkgs; [
#style
@@ -50,7 +54,8 @@ hiddenReceiver ? "", }:
dosfstools
];
}
{ # bash configuration
{
# bash configuration
programs.bash = {
enableCompletion = true;
interactiveShellInit = ''
@@ -81,45 +86,49 @@ hiddenReceiver ? "", }:
'';
};
}
{ # ssh configuration
{
# ssh configuration
services.openssh.enable = true;
services.openssh.passwordAuthentication = false;
systemd.services.sshd.wantedBy = lib.mkForce [ "multi-user.target" ];
}
{ # hidden ssh announce
config = let
torDirectory = "/var/lib/tor";
hiddenServiceDir = torDirectory + "/liveos";
in {
services.tor = {
enable = true;
client.enable = true;
extraConfig = ''
HiddenServiceDir ${hiddenServiceDir}
HiddenServicePort 22 127.0.0.1:22
'';
};
systemd.services.hidden-ssh-announce = {
description = "irc announce hidden ssh";
after = [ "tor.service" "network-online.target" ];
wants = [ "tor.service" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = {
ExecStart = pkgs.writers.writeDash "irc-announce-ssh" ''
set -efu
until test -e ${hiddenServiceDir}/hostname; do
echo "still waiting for ${hiddenServiceDir}/hostname"
sleep 1
done
until ${pkgs.tor}/bin/torify ${pkgs.netcat-openbsd}/bin/nc -z ${hiddenReceiver} 1337; do sleep 1; done && \
echo "torify ssh root@$(cat ${hiddenServiceDir}/hostname) -i ~/.ssh/id_rsa" | ${pkgs.tor}/bin/torify ${pkgs.nmap}/bin/ncat ${hiddenReceiver} 1337
'';
PrivateTmp = "true";
User = "tor";
Type = "oneshot";
};
};
};
}
{
# hidden ssh announce
config =
let
torDirectory = "/var/lib/tor";
hiddenServiceDir = torDirectory + "/liveos";
in
{
services.tor = {
enable = true;
client.enable = true;
extraConfig = ''
HiddenServiceDir ${hiddenServiceDir}
HiddenServicePort 22 127.0.0.1:22
'';
};
systemd.services.hidden-ssh-announce = {
description = "irc announce hidden ssh";
after = [ "tor.service" "network-online.target" ];
wants = [ "tor.service" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = {
ExecStart = pkgs.writers.writeDash "irc-announce-ssh" ''
set -efu
until test -e ${hiddenServiceDir}/hostname; do
echo "still waiting for ${hiddenServiceDir}/hostname"
sleep 1
done
until ${pkgs.tor}/bin/torify ${pkgs.netcat-openbsd}/bin/nc -z ${hiddenReceiver} 1337; do sleep 1; done && \
echo "torify ssh root@$(cat ${hiddenServiceDir}/hostname) -i ~/.ssh/id_rsa" | ${pkgs.tor}/bin/torify ${pkgs.nmap}/bin/ncat ${hiddenReceiver} 1337
'';
PrivateTmp = "true";
User = "tor";
Type = "oneshot";
};
};
};
}
];
}
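For context: the hidden-ssh-announce unit above waits until the liveos hidden service has a hostname, then pushes a ready-to-paste login line to port 1337 of the hiddenReceiver onion address (presumably the address handed out by the remote-install-get-hiddenReceiver helper referenced in the argument comment). A minimal sketch of the receiving side, assuming the receiver runs its own Tor hidden service that forwards port 1337 to localhost and has ncat from nmap installed; this is only an illustration, not the actual helper:

# hypothetical listener behind the receiver's hidden service (not part of this commit)
ncat -l -k 127.0.0.1 1337
# every announcement arrives as one line of the form
#   torify ssh root@<liveos-onion>.onion -i ~/.ssh/id_rsa
# which can be run verbatim on any machine that has torify available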


@@ -11,7 +11,8 @@ let
hiddenReceiver = "";
};
in {
in
{
imports = [ remote-access ];


@@ -12,7 +12,8 @@ let
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC6uza62+Go9sBFs3XZE2OkugBv9PJ7Yv8ebCskE5WYPcahMZIKkQw+zkGI8EGzOPJhQEv2xk+XBf2VOzj0Fto4nh8X5+Llb1nM+YxQPk1SVlwbNAlhh24L1w2vKtBtMy277MF4EP+caGceYP6gki5+DzlPUSdFSAEFFWgN1WPkiyUii15Xi3QuCMR8F18dbwVUYbT11vwNhdiAXWphrQG+yPguALBGR+21JM6fffOln3BhoDUp2poVc5Qe2EBuUbRUV3/fOU4HwWVKZ7KCFvLZBSVFutXCj5HuNWJ5T3RuuxJSmY5lYuFZx9gD+n+DAEJt30iXWcaJlmUqQB5awcB1S2d9pJ141V4vjiCMKUJHIdspFrI23rFNYD9k2ZXDA8VOnQE33BzmgF9xOVh6qr4G0oEpsNqJoKybVTUeSyl4+ifzdQANouvySgLJV/pcqaxX1srSDIUlcM2vDMWAs3ryCa0aAlmAVZIHgRhh6wa+IXW8gIYt+5biPWUuihJ4zGBEwkyVXXf2xsecMWCAGPWPDL0/fBfY9krNfC5M2sqxey2ShFIq+R/wMdaI7yVjUCF2QIUNiIdFbJL6bDrDyHnEXJJN+rAo23jUoTZZRv7Jq3DB/A5H7a73VCcblZyUmwMSlpg3wos7pdw5Ctta3zQPoxoAKGS1uZ+yTeZbPMmdbw==";
hiddenReceiver = "";
};
in {
in
{
imports = [ remote-access ];


@@ -41,27 +41,29 @@
enable = true;
displayManager.auto.enable = true;
desktopManager = let
guide = pkgs.stdenv.mkDerivation {
name = "yubikey-guide-2019-01-21.html";
src = pkgs.fetchFromGitHub {
owner = "drduh";
repo = "YubiKey-Guide";
rev = "035d98ebbed54a0218ccbf23905054d32f97508e";
sha256 = "0rzy06a5xgfjpaklxdgrxml24d0vhk78lb577l3z4x7a2p32dbyq";
};
buildInputs = [ pkgs.pandoc ];
installPhase =
"pandoc --highlight-style pygments -s --toc README.md -o $out";
};
in {
default = "xfce";
xterm.enable = false;
xfce.enable = true;
xfce.extraSessionCommands = ''
${pkgs.midori}/bin/midori ${guide} &
${pkgs.xfce.terminal}/bin/xfce4-terminal &
'';
};
desktopManager =
let
guide = pkgs.stdenv.mkDerivation {
name = "yubikey-guide-2019-01-21.html";
src = pkgs.fetchFromGitHub {
owner = "drduh";
repo = "YubiKey-Guide";
rev = "035d98ebbed54a0218ccbf23905054d32f97508e";
sha256 = "0rzy06a5xgfjpaklxdgrxml24d0vhk78lb577l3z4x7a2p32dbyq";
};
buildInputs = [ pkgs.pandoc ];
installPhase =
"pandoc --highlight-style pygments -s --toc README.md -o $out";
};
in
{
default = "xfce";
xterm.enable = false;
xfce.enable = true;
xfce.extraSessionCommands = ''
${pkgs.midori}/bin/midori ${guide} &
${pkgs.xfce.terminal}/bin/xfce4-terminal &
'';
};
};
}


@@ -12,7 +12,8 @@ let
${pkgs.black}/bin/black --exclude venv ${toString ./.}
'';
in pkgs.mkShell {
in
pkgs.mkShell {
buildInputs = with pkgs; [ myPython startServer reformat ];

shell.nix (102 changed lines)

@@ -45,7 +45,8 @@ let
property.file = toString ./submodules/property;
} else
{ };
in {
in
{
system.file = toString ./system;
configs.file = toString ./configs;
nixos-config.symlink = "configs/${name}/configuration.nix";
@@ -144,44 +145,55 @@ let
deployment = { secrets, content }:
name:
{ host ? (hostPattern name), target ? "/var/src/", user ? "root"
, commandPrefix ? "deploy", enableSwitch ? true, enableSecrets ? true }:
with ops;
let
commandName = if enableSecrets then
"${commandPrefix}-${name}-with-secrets"
else
"${commandPrefix}-${name}-without-secrets";
populateCommands = with lib;
flatten [
content
(optionals enableSecrets secrets)
(optionals enableSwitch [ switch ])
];
in jobs commandName "${user}@${host}${target}" populateCommands;
{ host ? (hostPattern name)
, target ? "/var/src/"
, user ? "root"
, commandPrefix ? "deploy"
, enableSwitch ? true
, enableSecrets ? true
}:
with ops;
let
commandName =
if enableSecrets then
"${commandPrefix}-${name}-with-secrets"
else
"${commandPrefix}-${name}-without-secrets";
populateCommands = with lib;
flatten [
content
(optionals enableSecrets secrets)
(optionals enableSwitch [ switch ])
];
in
jobs commandName "${user}@${host}${target}" populateCommands;
serverDeployment = name:
with ops;
deployment {
content = [
(populate source.nixPkgs)
(populate source.modules)
(populate (source.system name))
];
secrets = [ (populate (source.secrets name)) ];
} name;
deployment
{
content = [
(populate source.nixPkgs)
(populate source.modules)
(populate (source.system name))
];
secrets = [ (populate (source.secrets name)) ];
}
name;
desktopDeployment = name:
with ops;
deployment {
content = [
(populate source.nixPkgs)
(populate source.modules)
(populate (source.system name))
];
secrets =
[ (populate (source.secrets name)) (populate source.desktopSecrets) ];
} name;
deployment
{
content = [
(populate source.nixPkgs)
(populate source.modules)
(populate (source.system name))
];
secrets =
[ (populate (source.secrets name)) (populate source.desktopSecrets) ];
}
name;
cleanupNix = name:
{ ... }:
@@ -191,13 +203,14 @@ let
user = "root";
port = "22";
};
in pkgs.writers.writeDashBin "clean-${name}" # sh
''
set -eu
${pkgs.openssh}/bin/ssh \
${target.user}@${target.host} -p ${target.port} \
nix-collect-garbage -d
'';
in
pkgs.writers.writeDashBin "clean-${name}" # sh
''
set -eu
${pkgs.openssh}/bin/ssh \
${target.user}@${target.host} -p ${target.port} \
nix-collect-garbage -d
'';
# helper function to make stuff more readable
runForAll = serverList: command: arguments:
@@ -217,7 +230,8 @@ let
target = "/mnt/var/src";
enableSwitch = false;
};
in deployments ++ cleanup ++ install;
in
deployments ++ cleanup ++ install;
desktops = desktopList:
with lib;
@@ -231,9 +245,11 @@ let
target = "/mnt/var/src";
enableSwitch = false;
};
in deployments ++ cleanup ++ install;
in
deployments ++ cleanup ++ install;
in pkgs.mkShell {
in
pkgs.mkShell {
buildInputs = with pkgs;
(servers [ "workhorse" "sputnik" "porani" "dummy" ])
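For orientation: the deploy, clean and install commands generated above are what these buildInputs put on PATH inside this shell (assuming the jobs helper builds runnable scripts, which is how they are consumed here). A rough usage sketch; command names follow the commandPrefix/name scheme from the deployment function, with workhorse taken from the servers list:

nix-shell                        # enter the deploy shell defined by this file
deploy-workhorse-with-secrets    # populate nixpkgs, modules and system plus secrets, then switch
clean-workhorse                  # run nix-collect-garbage -d on that host over ssh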


@@ -0,0 +1 @@


@@ -11,7 +11,8 @@ let
# rev = "928f9e5e1d63e77a91f2ca57ffa2be1fef3078ec";
#sha256 = "0rs84c549l863vbnnqgnx7v6m2zlq0wz46jbhm4v1l1a25d966s1";
#};
in {
in
{
imports = [ (toString hcloud-modules) ];
# configure admin ssh keys


@@ -1,17 +1,20 @@
{ pkgs ? import <nixpkgs> { } }:
let
terranix = pkgs.callPackage (pkgs.fetchgit {
url = "https://github.com/mrVanDalo/terranix.git";
rev = "2.3.0";
sha256 = "030067h3gjc02llaa7rx5iml0ikvw6szadm0nrss2sqzshsfimm4";
}) { };
terranix = pkgs.callPackage
(pkgs.fetchgit {
url = "https://github.com/mrVanDalo/terranix.git";
rev = "2.3.0";
sha256 = "030067h3gjc02llaa7rx5iml0ikvw6szadm0nrss2sqzshsfimm4";
})
{ };
terraform = pkgs.writers.writeBashBin "terraform" ''
export TF_VAR_hcloud_api_token=`${pkgs.pass}/bin/pass development/hetzner.com/api-token`
${pkgs.terraform_0_12}/bin/terraform "$@"
'';
in pkgs.mkShell {
in
pkgs.mkShell {
buildInputs = [


@@ -3,43 +3,45 @@ with builtins; {
imports = [ ./provider.nix ./nginx.nix ./journald.nix ];
# create default index
resource.graylog_index_set.default = let
maxIndexSize = 200;
maxIndexCount = 20;
isDefault = true;
in {
title = "default";
description = ''
This is the default index set, where everything ends up which is
not specifically send to another index.
Be aware this index can only hold ${
toString (maxIndexCount * maxIndexSize)
}MB of logs!
'';
default = isDefault;
index_prefix = "graylog";
rotation_strategy_class =
"org.graylog2.indexer.rotation.strategies.SizeBasedRotationStrategy";
retention_strategy_class =
"org.graylog2.indexer.retention.strategies.DeletionRetentionStrategy";
index_analyzer = "standard";
index_optimization_disabled = false;
writable = true;
shards = 1;
replicas = 0;
index_optimization_max_num_segments = 1;
field_type_refresh_interval = 5000;
retention_strategy = toJSON ({
max_number_of_indices = maxIndexCount;
type =
"org.graylog2.indexer.retention.strategies.DeletionRetentionStrategyConfig";
});
rotation_strategy = toJSON ({
#max_docs_per_index = 30000000;
max_size = maxIndexSize * 1024 * 1024;
type =
"org.graylog2.indexer.rotation.strategies.SizeBasedRotationStrategyConfig";
});
};
resource.graylog_index_set.default =
let
maxIndexSize = 200;
maxIndexCount = 20;
isDefault = true;
in
{
title = "default";
description = ''
This is the default index set, where everything ends up which is
not specifically send to another index.
Be aware this index can only hold ${
toString (maxIndexCount * maxIndexSize)
}MB of logs!
'';
default = isDefault;
index_prefix = "graylog";
rotation_strategy_class =
"org.graylog2.indexer.rotation.strategies.SizeBasedRotationStrategy";
retention_strategy_class =
"org.graylog2.indexer.retention.strategies.DeletionRetentionStrategy";
index_analyzer = "standard";
index_optimization_disabled = false;
writable = true;
shards = 1;
replicas = 0;
index_optimization_max_num_segments = 1;
field_type_refresh_interval = 5000;
retention_strategy = toJSON ({
max_number_of_indices = maxIndexCount;
type =
"org.graylog2.indexer.retention.strategies.DeletionRetentionStrategyConfig";
});
rotation_strategy = toJSON ({
#max_docs_per_index = 30000000;
max_size = maxIndexSize * 1024 * 1024;
type =
"org.graylog2.indexer.rotation.strategies.SizeBasedRotationStrategyConfig";
});
};
}
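A quick check of the size warning interpolated into the description above, using the two let bindings from this resource (maxIndexCount = 20, maxIndexSize = 200):

# the value spliced into "can only hold ...MB of logs!"
echo "$(( 20 * 200 ))MB"   # prints 4000MB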


@@ -2,19 +2,19 @@
# to send data to these inputs
log_format graylog2_json escape=json '{ "timestamp": "$time_iso8601", '
'"facility": "nginx", '
'"src_addr": "$remote_addr", '
'"body_bytes_sent": $body_bytes_sent, '
'"request_time": $request_time, '
'"response_status": $status, '
'"request": "$request", '
'"request_method": "$request_method", '
'"host": "$host",'
'"upstream_cache_status": "$upstream_cache_status",'
'"upstream_addr": "$upstream_addr",'
'"http_x_forwarded_for": "$http_x_forwarded_for",'
'"http_referrer": "$http_referer", '
'"http_user_agent": "$http_user_agent" }';
'"facility": "nginx", '
'"src_addr": "$remote_addr", '
'"body_bytes_sent": $body_bytes_sent, '
'"request_time": $request_time, '
'"response_status": $status, '
'"request": "$request", '
'"request_method": "$request_method", '
'"host": "$host",'
'"upstream_cache_status": "$upstream_cache_status",'
'"upstream_addr": "$upstream_addr",'
'"http_x_forwarded_for": "$http_x_forwarded_for",'
'"http_referrer": "$http_referer", '
'"http_user_agent": "$http_user_agent" }';
access_log syslog:server=${access_log_input} graylog2_json;
error_log syslog:server=${error_log_input};
@@ -269,41 +269,43 @@ with builtins; {
};
};
graylog_stream_rule = let
nq_stream_rule = field: value: stream_id: {
inherit field value stream_id;
type = 1;
inverted = true;
};
eq_stream_rule = field: value: stream_id: {
inherit field value stream_id;
type = 1;
inverted = false;
};
gt_stream_rule = field: value: stream_id: {
inherit field value stream_id;
type = 3;
inverted = false;
};
lt_stream_rule = field: value: stream_id: {
inherit field value stream_id;
type = 4;
inverted = false;
};
between = min: max: stream_id: {
"is_nginx_access_${min}_${max}" =
(eq_stream_rule "nginx_access" true stream_id);
"nginx_above${min}" = (gt_stream_rule "response_status" min stream_id);
"nginx_below${max}" = (lt_stream_rule "response_status" max stream_id);
};
in (between "499" "600" "\${graylog_stream.nginx5xx.id}")
// (between "399" "500" "\${graylog_stream.nginx4xx.id}")
// (between "199" "300" "\${graylog_stream.nginx2xx.id}") // {
is_nginx_access = (eq_stream_rule "nginx_access" true
"\${graylog_stream.nginx_access.id}");
is_nginx_error =
(eq_stream_rule "nginx_error" true "\${graylog_stream.nginx_error.id}");
};
graylog_stream_rule =
let
nq_stream_rule = field: value: stream_id: {
inherit field value stream_id;
type = 1;
inverted = true;
};
eq_stream_rule = field: value: stream_id: {
inherit field value stream_id;
type = 1;
inverted = false;
};
gt_stream_rule = field: value: stream_id: {
inherit field value stream_id;
type = 3;
inverted = false;
};
lt_stream_rule = field: value: stream_id: {
inherit field value stream_id;
type = 4;
inverted = false;
};
between = min: max: stream_id: {
"is_nginx_access_${min}_${max}" =
(eq_stream_rule "nginx_access" true stream_id);
"nginx_above${min}" = (gt_stream_rule "response_status" min stream_id);
"nginx_below${max}" = (lt_stream_rule "response_status" max stream_id);
};
in
(between "499" "600" "\${graylog_stream.nginx5xx.id}")
// (between "399" "500" "\${graylog_stream.nginx4xx.id}")
// (between "199" "300" "\${graylog_stream.nginx2xx.id}") // {
is_nginx_access = (eq_stream_rule "nginx_access" true
"\${graylog_stream.nginx_access.id}");
is_nginx_error =
(eq_stream_rule "nginx_error" true "\${graylog_stream.nginx_error.id}");
};
};
}


@@ -5,7 +5,8 @@ let
getVariable = name: "\${ var.${name} }";
in {
in
{
hcloud = {
enable = true;


@@ -6,7 +6,8 @@ let
cfg = config.hcloud.nixserver;
in {
in
{
options.hcloud.nixserver = {
enable = mkEnableOption ''


@@ -14,7 +14,8 @@ let
};
});
in pkgs.mkShell {
in
pkgs.mkShell {
# needed pkgs
# -----------


@@ -5,7 +5,8 @@ let
rev = "5fa359a482892cd973dcc6ecfc607f4709f24495";
sha256 = "0smgmdiklj98y71fmcdjsqjq8l41i66hs8msc7k4m9dpkphqk86p";
};
in {
in
{
imports = [ "${hcloud-modules}/default.nix" ];
@@ -43,8 +44,9 @@ in {
ServerAliveInterval 60
ServerAliveCountMax 3
'';
in concatStringsSep "\n"
(map configPart (attrNames config.hcloud.nixserver));
in
concatStringsSep "\n"
(map configPart (attrNames config.hcloud.nixserver));
};
}


@ -34,7 +34,8 @@ let
apiImage = "thecodingmachine/workadventure-back:develop";
uploaderImage = "thecodingmachine/workadventure-uploader:develop";
in {
in
{
virtualisation.docker.enable = true;
boot.kernel.sysctl."net.ipv4.ip_forward" = true;


@@ -7,16 +7,18 @@ let
sha256 = "193pajq1gcd9jyd12nii06q1sf49xdhbjbfqk3lcq83s0miqfs63";
});
ops = let
overlay = self: super: {
# overwrite ssh to use the generated ssh configuration
openssh = super.writeShellScriptBin "ssh" ''
${super.openssh}/bin/ssh -F ${
toString ./generated/ssh-configuration
} "$@"
'';
};
in opsImport { overlays = [ overlay ]; };
ops =
let
overlay = self: super: {
# overwrite ssh to use the generated ssh configuration
openssh = super.writeShellScriptBin "ssh" ''
${super.openssh}/bin/ssh -F ${
toString ./generated/ssh-configuration
} "$@"
'';
};
in
opsImport { overlays = [ overlay ]; };
lib = ops.lib;
pkgs = ops.pkgs;
@@ -49,18 +51,19 @@ let
deployServer = name:
{ user ? "root", host, ... }:
with ops;
jobs "deploy-${name}" "${user}@${host.ipv4}" [
# deploy secrets to /run/plops-secrets/secrets
# (populateTmpfs (source.secrets name))
# deploy system to /var/src/system
(populate (source.system name))
# deploy nixpkgs to /var/src/nixpkgs
(populate source.nixPkgs)
switch
];
with ops;
jobs "deploy-${name}" "${user}@${host.ipv4}" [
# deploy secrets to /run/plops-secrets/secrets
# (populateTmpfs (source.secrets name))
# deploy system to /var/src/system
(populate (source.system name))
# deploy nixpkgs to /var/src/nixpkgs
(populate source.nixPkgs)
switch
];
in pkgs.mkShell {
in
pkgs.mkShell {
buildInputs = lib.mapAttrsToList deployServer servers;


@@ -13,7 +13,8 @@ let
${pkgs.terraform_0_12}/bin/terraform "$@"
'';
in pkgs.mkShell {
in
pkgs.mkShell {
buildInputs = [


@@ -10,7 +10,8 @@ let
hcloud-modules = /home/palo/dev/terranix-hcloud/terraform-0.11;
in {
in
{
imports = [ (toString hcloud-modules) ./config/ssh-setup.nix ];


@@ -19,7 +19,8 @@
};
'';
allServerParts = map serverPart (attrNames config.hcloud.server);
in ''
in
''
{
${concatStringsSep "\n" allServerParts}
}


@@ -11,7 +11,8 @@ let
publicKeyFile = ../../sshkey.pub;
};
target = file: "${toString ../../02-build/generated}/${file}";
in {
in
{
# configure admin ssh keys
users.admins.palo.publicKey = lib.fileContents ssh.publicKeyFile;
@@ -36,8 +37,9 @@ in {
ServerAliveInterval 60
ServerAliveCountMax 3
'';
in concatStringsSep "\n"
(map configPart (attrNames config.hcloud.server));
in
concatStringsSep "\n"
(map configPart (attrNames config.hcloud.server));
};
};
}


@@ -2,11 +2,13 @@
let
terranix = pkgs.callPackage (pkgs.fetchgit {
url = "https://github.com/mrVanDalo/terranix.git";
rev = "6097722f3a94972a92d810f3a707351cd425a4be";
sha256 = "1d8w82mvgflmscvq133pz9ynr79cgd5qjggng85byk8axj6fg6jw";
}) { };
terranix = pkgs.callPackage
(pkgs.fetchgit {
url = "https://github.com/mrVanDalo/terranix.git";
rev = "6097722f3a94972a92d810f3a707351cd425a4be";
sha256 = "1d8w82mvgflmscvq133pz9ynr79cgd5qjggng85byk8axj6fg6jw";
})
{ };
terraform = pkgs.writers.writeDashBin "terraform" ''
export TF_VAR_hcloud_api_token=`${pkgs.pass}/bin/pass development/hetzner.com/api-token`
@@ -27,7 +29,8 @@ let
rm ${toString ./.}/terraform.tfstate*
'';
in pkgs.mkShell {
in
pkgs.mkShell {
buildInputs = with pkgs; [ terranix terraform create destroy ];


@@ -4,21 +4,23 @@
let
ops = let
opsImport = import ((import <nixpkgs> { }).fetchgit {
url = "https://github.com/mrVanDalo/plops.git";
rev = "9fabba016a3553ae6e13d5d17d279c4de2eb00ad";
sha256 = "193pajq1gcd9jyd12nii06q1sf49xdhbjbfqk3lcq83s0miqfs63";
});
overlay = self: super: {
# overwrite ssh to use the generated ssh configuration
openssh = super.writers.writeBashBin "ssh" ''
${super.openssh}/bin/ssh -F ${
toString ./generated/ssh-configuration
} "$@"
'';
};
in opsImport { overlays = [ overlay ]; };
ops =
let
opsImport = import ((import <nixpkgs> { }).fetchgit {
url = "https://github.com/mrVanDalo/plops.git";
rev = "9fabba016a3553ae6e13d5d17d279c4de2eb00ad";
sha256 = "193pajq1gcd9jyd12nii06q1sf49xdhbjbfqk3lcq83s0miqfs63";
});
overlay = self: super: {
# overwrite ssh to use the generated ssh configuration
openssh = super.writers.writeBashBin "ssh" ''
${super.openssh}/bin/ssh -F ${
toString ./generated/ssh-configuration
} "$@"
'';
};
in
opsImport { overlays = [ overlay ]; };
lib = ops.lib;
pkgs = ops.pkgs;
@@ -48,13 +50,13 @@ let
deployServer = name:
{ user ? "root", host, ... }:
with ops;
jobs "deploy-${name}" "${user}@${host.ipv4}" [
(populate (source.system name))
(populate source.nixPkgs)
(populate source.modules)
switch
];
with ops;
jobs "deploy-${name}" "${user}@${host.ipv4}" [
(populate (source.system name))
(populate source.nixPkgs)
(populate source.modules)
switch
];
moshServer = name:
{ user ? "root", host, ... }:
@@ -66,7 +68,8 @@ let
"${user}@${host.ipv4}"
'';
in pkgs.mkShell {
in
pkgs.mkShell {
buildInputs = lib.mapAttrsToList deployServer servers
++ mapAttrsToList moshServer servers;


@@ -5,7 +5,8 @@ let
rev = "5fa359a482892cd973dcc6ecfc607f4709f24495";
sha256 = "0smgmdiklj98y71fmcdjsqjq8l41i66hs8msc7k4m9dpkphqk86p";
};
in {
in
{
imports = [ "${hcloud-modules}/default.nix" ];
@@ -40,8 +41,9 @@ in {
ServerAliveInterval 60
ServerAliveCountMax 3
'';
in concatStringsSep "\n"
(map configPart (attrNames config.hcloud.nixserver));
in
concatStringsSep "\n"
(map configPart (attrNames config.hcloud.nixserver));
};
}


@@ -35,7 +35,8 @@ let
apiImage = "thecodingmachine/workadventure-back:${version}";
uploaderImage = "thecodingmachine/workadventure-uploader:${version}";
in {
in
{
virtualisation.docker.enable = true;
boot.kernel.sysctl."net.ipv4.ip_forward" = true;


@@ -7,16 +7,18 @@ let
sha256 = "193pajq1gcd9jyd12nii06q1sf49xdhbjbfqk3lcq83s0miqfs63";
});
ops = let
overlay = self: super: {
# overwrite ssh to use the generated ssh configuration
openssh = super.writeShellScriptBin "ssh" ''
${super.openssh}/bin/ssh -F ${
toString ./generated/ssh-configuration
} "$@"
'';
};
in opsImport { overlays = [ overlay ]; };
ops =
let
overlay = self: super: {
# overwrite ssh to use the generated ssh configuration
openssh = super.writeShellScriptBin "ssh" ''
${super.openssh}/bin/ssh -F ${
toString ./generated/ssh-configuration
} "$@"
'';
};
in
opsImport { overlays = [ overlay ]; };
lib = ops.lib;
pkgs = ops.pkgs;
@@ -49,18 +51,19 @@ let
deployServer = name:
{ user ? "root", host, ... }:
with ops;
jobs "deploy-${name}" "${user}@${host.ipv4}" [
# deploy secrets to /run/plops-secrets/secrets
# (populateTmpfs (source.secrets name))
# deploy system to /var/src/system
(populate (source.system name))
# deploy nixpkgs to /var/src/nixpkgs
(populate source.nixPkgs)
switch
];
with ops;
jobs "deploy-${name}" "${user}@${host.ipv4}" [
# deploy secrets to /run/plops-secrets/secrets
# (populateTmpfs (source.secrets name))
# deploy system to /var/src/system
(populate (source.system name))
# deploy nixpkgs to /var/src/nixpkgs
(populate source.nixPkgs)
switch
];
in pkgs.mkShell {
in
pkgs.mkShell {
buildInputs = lib.mapAttrsToList deployServer servers;


@@ -1,18 +1,21 @@
{ pkgs ? import <nixpkgs> { } }:
let
terranix = pkgs.callPackage (pkgs.fetchgit {
url = "https://github.com/mrVanDalo/terranix.git";
rev = "2.3.0";
sha256 = "030067h3gjc02llaa7rx5iml0ikvw6szadm0nrss2sqzshsfimm4";
}) { };
terranix = pkgs.callPackage
(pkgs.fetchgit {
url = "https://github.com/mrVanDalo/terranix.git";
rev = "2.3.0";
sha256 = "030067h3gjc02llaa7rx5iml0ikvw6szadm0nrss2sqzshsfimm4";
})
{ };
terraform = pkgs.writers.writeBashBin "terraform" ''
export TF_VAR_hcloud_api_token=`${pkgs.pass}/bin/pass development/hetzner.com/api-token`
${pkgs.terraform_0_12}/bin/terraform "$@"
'';
in pkgs.mkShell {
in
pkgs.mkShell {
buildInputs = [