terranix: delete graylog and sysfire server

Ingolf Wagner 2020-05-20 00:29:13 +02:00
parent 85b846893e
commit ea9b422de7
No known key found for this signature in database
GPG key ID: 76BF5F1928B9618B
23 changed files with 0 additions and 2135 deletions

@@ -1,17 +0,0 @@
# The idea
On the all-messages stream, quick pipelines forward traffic to
dedicated streams; on these streams the more costly but also richer
pipelines are triggered.
* avoid extractors, because they are applied to every message.
# Use the Generic Geo IP Location plugin (at the end)
This way I don't have to parse everything myself.
# Use Content Packs
* for nginx

@@ -1,142 +0,0 @@
# https://github.com/suzuki-shunsuke/go-graylog/tree/master/terraform
{ pkgs, lib, ... }: {
imports = [
./modules
./config/elasticsearch.nix
./config/gogs.nix
./config/home-assistant.nix
./config/kernel.nix
#./config/nginx.nix
./config/sshd.nix
./config/sslh.nix
./config/sshguard.nix
./config/tinc.nix
];
# ---- [ default ]
data."graylog_index_set".default.index_prefix = "graylog";
# ---- [ junk ]
resource."graylog_index_set".junk = {
title = "junk index";
index_prefix = "trash";
# https://godoc.org/github.com/suzuki-shunsuke/go-graylog#pkg-constants
rotation_strategy_class =
"org.graylog2.indexer.rotation.strategies.SizeBasedRotationStrategy";
rotation_strategy = {
type =
"org.graylog2.indexer.rotation.strategies.SizeBasedRotationStrategyConfig";
max_size = 1024 * 1024 * 10;
};
retention_strategy_class =
"org.graylog2.indexer.retention.strategies.DeletionRetentionStrategy";
retention_strategy = {
type =
"org.graylog2.indexer.retention.strategies.DeletionRetentionStrategyConfig";
max_number_of_indices = 10;
};
index_analyzer = "standard";
shards = 1;
index_optimization_max_num_segments = 1;
field_type_refresh_interval = 10000;
writable = "true";
};
graylog.stream.junk = { index_set_id = "\${graylog_index_set.junk.id}"; };
# ---- [ thread ]
resource."graylog_index_set".thread = {
title = "thread";
index_prefix = "thread";
# https://godoc.org/github.com/suzuki-shunsuke/go-graylog#pkg-constants
rotation_strategy_class =
"org.graylog2.indexer.rotation.strategies.SizeBasedRotationStrategy";
rotation_strategy = {
type =
"org.graylog2.indexer.rotation.strategies.SizeBasedRotationStrategyConfig";
max_size = 1024 * 1024 * 10;
};
retention_strategy_class =
"org.graylog2.indexer.retention.strategies.DeletionRetentionStrategy";
retention_strategy = {
type =
"org.graylog2.indexer.retention.strategies.DeletionRetentionStrategyConfig";
max_number_of_indices = 20;
};
index_analyzer = "standard";
shards = 1;
index_optimization_max_num_segments = 1;
field_type_refresh_interval = 10000;
writable = "true";
};
graylog.stream.thread = {
index_set_id = "\${graylog_index_set.thread.id}";
#pipelines = [ "\${graylog_pipeline.processThreads.id}" ];
};
#resource."graylog_stream_rule"."is_thread" = {
# field = "is_thread";
# value = "true";
# stream_id = "\${graylog_stream.thread.id}";
# description = "route everything that is a thread";
# #type = 0;
# #inverted = false;
#};
# not necessary because we have a geoip resolver
#graylog.pipeline.processThreads = {
# source = ''
# stage 0 match all
# rule "extract source_ip position";
# '';
# description = "process messages of the thread stream(TF)";
#};
#resource."graylog_pipeline_rule".extractSourceIpPosition = {
# description = "";
# source = ''
# rule "extract source_ip position"
# when
# has_field("source_ip")
# then
# let geo = lookup("geo_city_lookup", to_string($message.source_ip));
# set_field("ip_geolocation", geo["coordinates"]);
# set_field("ip_geo_country_code", geo["country"].iso_code);
# set_field("ip_geo_country_name", geo["country"].names.en);
# set_field("ip_geo_city_name", geo["city"].names.en);
# end
# '';
#};
#resource."graylog_pipeline_rule".extractRemoteIpPosition = {
# description = "";
# source = ''
# rule "extract remote_addr position"
# when
# has_field("remote_addr")
# then
# let geo = lookup("geo_city_lookup", to_string($message.remote_addr));
# set_field("ip_geolocation", geo["coordinates"]);
# set_field("ip_geo_country_code", geo["country"].iso_code);
# set_field("ip_geo_country_name", geo["country"].names.en);
# set_field("ip_geo_city_name", geo["city"].names.en);
# end
# '';
#};
#graylog.all_messages.rules = [ "extract remote_addr position" ];
}
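
A note on the two escape styles used above, since both appear in this file: in a double-quoted Nix string, `\${...}` produces a literal `${...}` in the rendered JSON, and inside an indented `''...''` string, `''${...}` does the same; the interpolation is therefore performed by Terraform, not by Nix. Illustrative sketch:

{
  # renders as "${graylog_index_set.junk.id}" in config.tf.json
  from_double_quotes = "\${graylog_index_set.junk.id}";
  # renders as route_to_stream(id:"${ graylog_stream.junk.id }");
  from_indented_string = ''
    route_to_stream(id:"''${ graylog_stream.junk.id }");
  '';
}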

@@ -1,48 +0,0 @@
# filters elasticsearch messages
{
resource."graylog_pipeline_rule" = {
routeToElasticSearchMessage = {
description = "route elasticsearch messages to elasticsearch stream (TF)";
source = ''
rule "route elasticsearch message"
when
to_string($message.facility) == "elasticsearch"
then
route_to_stream(id:"''${ graylog_stream.elasticsearch.id }", remove_from_default: true);
end
'';
};
elasticsearchJunk = {
source = ''
rule "mark and route elasticsearch junk"
when
starts_with(to_string($message.message), "Received short packet")
then
set_field("is_junk", true);
route_to_stream(id:"''${graylog_stream.junk.id}", remove_from_default: true);
end
'';
description = "mark elasticsearch noise as junk (TF)";
};
};
graylog.all_messages.rules = [ "route elasticsearch message" ];
graylog.stream.elasticsearch = {
index_set_id = "\${data.graylog_index_set.default.id}";
pipelines = [ "\${graylog_pipeline.processElasticSearchMessage.id}" ];
};
graylog.pipeline.processElasticSearchMessage = {
source = ''
stage 0 match all
rule "mark and route elasticsearch junk";
'';
description = "process messages of the elasticsearch stream(TF)";
};
}

@@ -1,35 +0,0 @@
# filters gogs messages
{
resource."graylog_pipeline_rule" = {
routeToGogsMessage = {
description = "route gogs messages to gogs stream (TF)";
source = ''
rule "route gogs message"
when
to_string($message.facility) == "gogs"
then
route_to_stream(id:"''${ graylog_stream.gogs.id }", remove_from_default: true);
end
'';
};
};
graylog.all_messages.rules = [ "route gogs message" ];
graylog.stream.gogs = {
index_set_id = "\${data.graylog_index_set.default.id}";
#pipelines = [ "\${graylog_pipeline.processGogsMessage.id}" ];
};
#graylog.pipeline.processGogsMessage = {
# source = ''
# stage 0 match all
# rule "extract firewall deny";
# '';
# description = "process messages of the gogs stream(TF)";
#};
}

@@ -1,36 +0,0 @@
# filters home-assistant messages
{
resource."graylog_pipeline_rule" = {
routeToHomeAssistant = {
description = "route hass messages to hass stream (TF)";
source = ''
rule "route hass message"
when
to_string($message.facility) == "hass"
then
route_to_stream(id:"''${ graylog_stream.homeassistant.id }", remove_from_default: true);
end
'';
};
};
graylog.all_messages.rules = [ "route hass message" ];
graylog.stream.homeassistant = {
index_set_id = "\${data.graylog_index_set.default.id}";
#pipelines = [ "\${graylog_pipeline.processHomeAssistantMessage.id}" ];
};
#graylog.pipeline.processHomeAssistantMessage = {
# source = ''
# stage 0 match all
# rule "extract firewall deny";
# '';
# description = "process messages of the hass stream(TF)";
#};
}

@@ -1,49 +0,0 @@
# filters kernel messages
{
resource."graylog_pipeline_rule" = {
routeToKernelMessage = {
description = "route kernel messages to kernel stream (TF)";
source = ''
rule "route kernel message"
when
to_string($message.facility) == "kernel"
then
route_to_stream(id:"''${ graylog_stream.kernel.id }", remove_from_default: true);
end
'';
};
extractFirewallDeny = {
description = "extract information form a firewall deny (TF)";
source = ''
rule "extract firewall deny"
when
starts_with(to_string($message.message), "refused connection:")
then
set_fields(grok("SRC=%{IP:source_ip} .* DPT=%{NUMBER:destination_port}", to_string($message.message)));
set_field("is_thread", true);
route_to_stream(id:"''${ graylog_stream.thread.id }");
end
'';
};
};
graylog.all_messages.rules = [ "route kernel message" ];
graylog.stream.kernel = {
index_set_id = "\${data.graylog_index_set.default.id}";
pipelines = [ "\${graylog_pipeline.processKernelMessage.id}" ];
};
graylog.pipeline.processKernelMessage = {
source = ''
stage 0 match all
rule "extract firewall deny";
'';
description = "process messages of the kernel stream(TF)";
};
}
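
A worked example of the grok extraction above (the log line is hypothetical, not from this commit):

# hypothetical kernel log line:
#   refused connection: IN=eth0 OUT= SRC=203.0.113.7 DST=192.0.2.1 PROTO=TCP DPT=2222
# grok("SRC=%{IP:source_ip} .* DPT=%{NUMBER:destination_port}", message)
#   yields source_ip = "203.0.113.7" and destination_port = "2222";
# set_fields() attaches both before the message is also routed to the thread stream.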

@@ -1,36 +0,0 @@
# filters nginx messages
{ config, ... }: {
resource."graylog_pipeline_rule" = {
# not working for some reason
extractHttpCode = {
description = "extract thread information nginx access (TF)";
source = ''
rule "extract response code"
when
has_field("response_status")
then
set_field("response_status_description", lookup_value("http_codes_description", to_long($message.response_status)));
end
'';
};
};
graylog.pipeline.processNginxMessage = {
source = ''
stage 99 match all
rule "extract response code";
'';
#streamId = config.graylog.all_messages.streamId;
#streamId = "\${data.graylog_stream.nginx.id}";
};
resource.graylog_pipeline_connection.processNginxMessage = {
stream_id = "\${data.graylog_stream.nginx.id}";
pipeline_ids = [ "\${graylog_pipeline.processNginxMessage.id}" ];
};
data.graylog_stream.nginx.title = "nginx";
}

@@ -1,35 +0,0 @@
# filters sshd messages
{
resource."graylog_pipeline_rule" = {
routeToSshdMessage = {
description = "route sshd messages to sshd stream (TF)";
source = ''
rule "route sshd message"
when
to_string($message.facility) == "sshd"
then
route_to_stream(id:"''${ graylog_stream.sshd.id }", remove_from_default: true);
end
'';
};
};
graylog.all_messages.rules = [ "route sshd message" ];
graylog.stream.sshd = {
index_set_id = "\${data.graylog_index_set.default.id}";
#pipelines = [ "\${graylog_pipeline.processSshdMessage.id}" ];
};
#graylog.pipeline.processSshdMessage = {
# source = ''
# stage 0 match all
# rule "mark and route sshd junk";
# '';
# description = "process messages of the sshd stream(TF)";
#};
}

@@ -1,49 +0,0 @@
# filters sshguard messages
{
resource."graylog_pipeline_rule" = {
routeToSshGuardMessage = {
description = "route sshguard messages to sshguard stream (TF)";
source = ''
rule "route sshguard message"
when
to_string($message.facility) == "sshguard"
then
route_to_stream(id:"''${ graylog_stream.sshguard.id }", remove_from_default: true);
end
'';
};
extractAttack = {
description = "extract sshguard attack information (TF)";
source = ''
rule "extract sshguard attack"
when
starts_with(to_string($message.message), "Attack from")
then
set_fields(grok(pattern:"Attack from \"%{IPV4:source_ip}\"", value: to_string($message.message), only_named_captures: true));
set_field("is_thread", true);
route_to_stream(id:"''${ graylog_stream.thread.id }");
end
'';
};
};
graylog.all_messages.rules = [ "route sshguard message" ];
graylog.stream.sshguard = {
index_set_id = "\${data.graylog_index_set.default.id}";
pipelines = [ "\${graylog_pipeline.processSshGuardMessage.id}" ];
};
graylog.pipeline.processSshGuardMessage = {
source = ''
stage 0 match all
rule "extract sshguard attack";
'';
description = "process messages of the sshguard stream(TF)";
};
}
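
For illustration, a hypothetical sshguard line and what the rule extracts:

# hypothetical sshguard log line:
#   Attack from "198.51.100.23" on service 100 with danger 10.
# grok(pattern: "Attack from \"%{IPV4:source_ip}\"", value: message, only_named_captures: true)
#   yields source_ip = "198.51.100.23";
# only_named_captures drops the anonymous sub-captures of IPV4, keeping just source_ip.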

@@ -1,49 +0,0 @@
# filters sslh messages
{
resource."graylog_pipeline_rule" = {
routeToSslhMessage = {
description = "route sslh messages to sslh stream (TF)";
source = ''
rule "route sslh message"
when
to_string($message.facility) == "sslh"
then
route_to_stream(id:"''${ graylog_stream.sslh.id }", remove_from_default: true);
end
'';
};
sslhJunk = {
source = ''
rule "mark and route sslh junk"
when
starts_with(to_string($message.message), "client socket closed")
then
drop_message();
//set_field("is_junk", true);
//route_to_stream(id:"''${graylog_stream.junk.id}", remove_from_default: true);
end
'';
description = "mark tinc noise as junk (TF)";
};
};
graylog.all_messages.rules = [ "route sslh message" ];
graylog.stream.sslh = {
index_set_id = "\${data.graylog_index_set.default.id}";
pipelines = [ "\${graylog_pipeline.processSslhMessage.id}" ];
};
graylog.pipeline.processSslhMessage = {
source = ''
stage 0 match all
rule "mark and route sslh junk";
'';
description = "process messages of the sslh stream(TF)";
};
}

@@ -1,49 +0,0 @@
# filters tinc messages
{
resource."graylog_pipeline_rule" = {
routeToTincMessage = {
description = "route tinc messages to tinc stream (TF)";
source = ''
rule "route tinc message"
when
to_string($message.facility) == "tincd"
then
route_to_stream(id:"''${ graylog_stream.tinc.id }", remove_from_default: true);
end
'';
};
tincJunk = {
source = ''
rule "mark and route tinc junk"
when
starts_with(to_string($message.message), "Received short packet")
then
drop_message();
//set_field("is_junk", true);
//route_to_stream(id:"''${graylog_stream.junk.id}", remove_from_default: true);
end
'';
description = "mark tinc noise as junk (TF)";
};
};
graylog.all_messages.rules = [ "route tinc message" ];
graylog.stream.tinc = {
index_set_id = "\${data.graylog_index_set.default.id}";
pipelines = [ "\${graylog_pipeline.processTincMessage.id}" ];
};
graylog.pipeline.processTincMessage = {
source = ''
stage 0 match all
rule "mark and route tinc junk";
'';
description = "process messages of the tinc stream(TF)";
};
}

@@ -1,13 +0,0 @@
Copyright 2019 Paul Barfuss
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@@ -1,23 +0,0 @@
### Configuring nginx
You need to run at least nginx version 1.11.8, with `escape=json` support.
**Add this to your nginx configuration file and restart the service:**
log_format graylog2_json escape=json '{ "timestamp": "$time_iso8601", '
'"remote_addr": "$remote_addr", '
'"body_bytes_sent": $body_bytes_sent, '
'"request_time": $request_time, '
'"response_status": $status, '
'"request": "$request", '
'"request_method": "$request_method", '
'"host": "$host",'
'"upstream_cache_status": "$upstream_cache_status",'
'"upstream_addr": "$upstream_addr",'
'"http_x_forwarded_for": "$http_x_forwarded_for",'
'"http_referrer": "$http_referer", '
'"http_user_agent": "$http_user_agent" }';
# replace the hostnames with the IP or hostname of your Graylog2 server
access_log syslog:server=graylog.server.org:12304 graylog2_json;
error_log syslog:server=graylog.server.org:12305;
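
With this format every access log entry reaches Graylog as a single JSON document; a hypothetical record (all values invented) looks like:

{ "timestamp": "2020-05-19T23:40:12+02:00", "remote_addr": "203.0.113.7",
  "body_bytes_sent": 612, "request_time": 0.004, "response_status": 200,
  "request": "GET / HTTP/1.1", "request_method": "GET", "host": "example.org",
  "upstream_cache_status": "HIT", "upstream_addr": "127.0.0.1:8080",
  "http_x_forwarded_for": "", "http_referrer": "", "http_user_agent": "curl/7.64.0" }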

@@ -1 +0,0 @@
{"id":"2e0a8254-6356-4fff-b956-883de11d0e4f","rev":1,"v":"1","name":"Systemd Sink","summary":"SystemD synk stuff","description":"Used for Journald2Gelf stuff ","vendor":"palo","url":"","created_at":"2019-10-17T16:19:12.642Z","server_version":"3.0.2+1686930","parameters":[],"entities":[{"id":"26439aa8-e05f-4571-a8cb-03be586ee23d","type":{"name":"input","version":"1"},"v":"1","data":{"title":{"@type":"string","@value":"Systemd Logging"},"configuration":{"recv_buffer_size":{"@type":"integer","@value":262144},"port":{"@type":"integer","@value":11201},"number_worker_threads":{"@type":"integer","@value":4},"bind_address":{"@type":"string","@value":"0.0.0.0"},"decompress_size_limit":{"@type":"integer","@value":8388608}},"static_fields":{},"type":{"@type":"string","@value":"org.graylog2.inputs.gelf.udp.GELFUDPInput"},"global":{"@type":"boolean","@value":true},"extractors":[]},"constraints":[{"type":"server-version","version":">=3.0.2+1686930"}]}]}

@@ -1,43 +0,0 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.graylog.all_messages;
in {
options.graylog.all_messages = {
streamId = mkOption {
type = with types; str;
default = "000000000000000000000001";
description = ''
id of "All Messages" stream;
'';
};
rules = mkOption {
default = [ ];
type = with types; listOf str;
example = [ "route sshd" "route kernel" "mark junk" ];
description = ''
all the rules that should be called by the pipeline operating on the
all-messages stream
'';
};
};
config = mkIf (cfg.rules != [ ]) {
graylog.pipeline.mainsorting = {
source = let rules = map (rule: " rule \"${rule}\";") cfg.rules;
in ''
stage 0 match either
${concatStringsSep "\n" rules}
'';
description = "main sorting pipeline (TF)";
streamId = cfg.streamId;
};
};
}
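
A hypothetical consumer of this module, and roughly what it renders to:

# hypothetical usage:
#   graylog.all_messages.rules = [ "route sshd message" "route kernel message" ];
# generates, via the pipeline module, roughly:
#   pipeline "mainsorting"
#   stage 0 match either
#     rule "route sshd message";
#     rule "route kernel message";
#   end
# connected to the built-in "All Messages" stream (id 000000000000000000000001).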

@@ -1 +0,0 @@
{ imports = [ ./all-messages.nix ./provider.nix ./pipeline.nix ./stream.nix ]; }

@@ -1,85 +0,0 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.graylog.pipeline;
in {
options.graylog.pipeline = mkOption {
default = { };
type = with types;
attrsOf (submodule ({ name, ... }: {
options = {
name = mkOption {
type = with types; str;
default = name;
description = ''
name of the pipeline
'';
};
streamId = mkOption {
type = with types; nullOr str;
default = null;
example = "000000000000000001";
description = ''
stream id on which this pipeline should operate;
don't use Terraform references here.
'';
};
description = mkOption {
type = with types; str;
default = "";
description = ''
description of the pipeline
'';
};
# todo : create proper module system here
source = mkOption {
type = with types; str;
description = ''
source of the pipeline (without the header and the end)
'';
};
};
}));
};
config = let
allPipelines = cfg;
allPipelineConnections = mapAttrsToList (name: values: {
"${values.streamId}" = "\${graylog_pipeline.${name}.id}";
}) (filterAttrs (name: values: values.streamId != null) allPipelines);
collected = foldAttrs (n: a: [ n ] ++ a) [ ] allPipelineConnections;
in mkIf (cfg != { }) {
# create pipelines
resource."graylog_pipeline" = mapAttrs (name: pipelineConfig: {
source = ''
pipeline "${pipelineConfig.name}"
${pipelineConfig.source}
end
'';
description = pipelineConfig.description;
}) allPipelines;
resource."graylog_pipeline_connection" = let
mapping = filter (name: builtins.match ".*\\$.*" name != null)
(builtins.attrNames collected);
in mkAssert (mapping == [ ]) ''
graylog.pipeline.<name>.streamId =
${concatStringsSep "\n " mapping}
is not valid.
use graylog.stream.<name>.pipelines instead
'' (mapAttrs (name: pipelineConfig: {
stream_id = name;
pipeline_ids = pipelineConfig;
}) collected);
};
}
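
A hypothetical definition and the resources this module derives from it:

# hypothetical usage:
#   graylog.pipeline.sorting = {
#     source = "stage 0 match all\n  rule \"some rule\";";
#     streamId = "000000000000000000000001";  # a literal id, not a "${...}" reference
#   };
# renders roughly to:
#   resource.graylog_pipeline.sorting.source = ''
#     pipeline "sorting"
#     stage 0 match all
#       rule "some rule";
#     end
#   '';
#   resource.graylog_pipeline_connection."000000000000000000000001" = {
#     stream_id = "000000000000000000000001";
#     pipeline_ids = [ "${graylog_pipeline.sorting.id}" ];
#   };
# streamIds containing "${...}" references are rejected by the mkAssert above.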

@@ -1,14 +0,0 @@
{
variable.web_endpoint_uri.description = "";
variable.auth_name.description = "";
variable.auth_password.description = "";
provider."graylog" = {
web_endpoint_uri = "\${var.web_endpoint_uri}";
auth_name = "\${var.auth_name}";
auth_password = "\${var.auth_password}";
api_version = "v3";
};
}

@@ -1,47 +0,0 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.graylog.stream;
in {
options.graylog.stream = mkOption {
default = { };
type = with types;
attrsOf (submodule ({ name, ... }: {
options = {
title = mkOption {
default = name;
type = with types; str;
};
index_set_id = mkOption { type = with types; str; };
disabled = mkOption {
default = false;
type = with types; bool;
};
matching_type = mkOption {
default = "AND";
type = with types; str;
};
pipelines = mkOption {
default = [ ];
type = with types; listOf str;
};
};
}));
};
config = mkIf (cfg != { }) {
resource.graylog_stream = mapAttrs (name: value: {
inherit (value) title index_set_id disabled matching_type;
}) cfg;
resource.graylog_pipeline_connection = mapAttrs (name: pipelineConfig: {
stream_id = "\${graylog_stream.${name}.id}";
pipeline_ids = pipelineConfig.pipelines;
}) cfg;
};
}
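
Likewise a hypothetical stream definition and its rendered resources:

# hypothetical usage:
#   graylog.stream.foo = {
#     index_set_id = "\${data.graylog_index_set.default.id}";
#     pipelines = [ "\${graylog_pipeline.processFoo.id}" ];
#   };
# renders roughly to:
#   resource.graylog_stream.foo = {
#     title = "foo"; index_set_id = "${data.graylog_index_set.default.id}";
#     disabled = false; matching_type = "AND";
#   };
#   resource.graylog_pipeline_connection.foo = {
#     stream_id = "${graylog_stream.foo.id}";
#     pipeline_ids = [ "${graylog_pipeline.processFoo.id}" ];
#   };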

@@ -1,71 +0,0 @@
{ pkgs ? import <nixpkgs> { } }:
let
terranix = pkgs.callPackage (pkgs.fetchgit {
url = "https://github.com/mrVanDalo/terranix.git";
rev = "6097722f3a94972a92d810f3a707351cd425a4be";
sha256 = "1d8w82mvgflmscvq133pz9ynr79cgd5qjggng85byk8axj6fg6jw";
}) { };
# a custom provider for terraform
graylog = pkgs.buildGoModule rec {
name = "terraform-provider-graylog-${version}";
version = "v3.3.0";
subPackages = [ "./terraform" ];
src = pkgs.fetchFromGitHub {
owner = "suzuki-shunsuke";
repo = "go-graylog";
sha256 = "12b0d70qzwaqgzksiyc7ia86g7869b1a6mfymqzkp2h5h4kcwcfh";
rev = "${version}";
};
modSha256 = "0zbly0wyqa4jw6h54b1y03j6v1c5fqgslfdyrzii9rpq3y6g0kkf";
postInstall = "mv $out/bin/terraform{,-provider-graylog_${version}}";
meta = with pkgs.stdenv.lib; {
homepage = "https://github.com/suzuki-shunsuke/go-graylog";
description = "Terraform provider is used to manage graylog.";
platforms = platforms.linux;
license = licenses.mpl20;
maintainers = with maintainers; [ palo ];
};
};
terraform = pkgs.terraform.withPlugins (p: [ graylog ]);
in pkgs.mkShell {
buildInputs = [
# terraform wrapper to set access variables
# -----------------------------------------
(pkgs.writeShellScriptBin "terraform" ''
export TF_VAR_web_endpoint_uri="http://schasch.private:9000/api"
export TF_VAR_auth_name=admin
export TF_VAR_auth_password=yourpassword
${terraform}/bin/terraform "$@"
'')
# terranix to avoid HCL
# ---------------------
terranix
# tooling
# -------
pkgs.terraform-landscape
pkgs.terraform-docs
];
shellHook = ''
# save shell history in project folder
HISTFILE=${toString ./.history}
# configure password store to use subfolder
export PASSWORD_STORE_DIR=./secrets
'';
}
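
The intended workflow inside this shell is roughly (a sketch; terranix prints the rendered Terraform JSON to stdout):

# inside nix-shell:
#   terranix > config.tf.json   # render the Nix configuration to Terraform JSON
#   terraform init              # the wrapper above injects the Graylog credentials
#   terraform apply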

@@ -1,35 +0,0 @@
{ pkgs, lib, ... }:
let
hcloud-modules = pkgs.fetchgit {
url = "https://github.com/mrVanDalo/terranix-hcloud.git";
rev = "c3571f76664e1813f90d97b8c194a1e0149e895e";
sha256 = "0plld74wincyy3c5gdfqh78pzrqibxh6r839dm0c717fajr9imwb";
};
in {
imports = [ (toString hcloud-modules) ];
# configure admin ssh keys
users.admins.palo.publicKey =
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC6uza62+Go9sBFs3XZE2OkugBv9PJ7Yv8ebCskE5WYPcahMZIKkQw+zkGI8EGzOPJhQEv2xk+XBf2VOzj0Fto4nh8X5+Llb1nM+YxQPk1SVlwbNAlhh24L1w2vKtBtMy277MF4EP+caGceYP6gki5+DzlPUSdFSAEFFWgN1WPkiyUii15Xi3QuCMR8F18dbwVUYbT11vwNhdiAXWphrQG+yPguALBGR+21JM6fffOln3BhoDUp2poVc5Qe2EBuUbRUV3/fOU4HwWVKZ7KCFvLZBSVFutXCj5HuNWJ5T3RuuxJSmY5lYuFZx9gD+n+DAEJt30iXWcaJlmUqQB5awcB1S2d9pJ141V4vjiCMKUJHIdspFrI23rFNYD9k2ZXDA8VOnQE33BzmgF9xOVh6qr4G0oEpsNqJoKybVTUeSyl4+ifzdQANouvySgLJV/pcqaxX1srSDIUlcM2vDMWAs3ryCa0aAlmAVZIHgRhh6wa+IXW8gIYt+5biPWUuihJ4zGBEwkyVXXf2xsecMWCAGPWPDL0/fBfY9krNfC5M2sqxey2ShFIq+R/wMdaI7yVjUCF2QIUNiIdFbJL6bDrDyHnEXJJN+rAo23jUoTZZRv7Jq3DB/A5H7a73VCcblZyUmwMSlpg3wos7pdw5Ctta3zQPoxoAKGS1uZ+yTeZbPMmdbw== cardno:000611343142";
users.admins.netsysfire.publicKey =
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILYl33wPGHM5xyq4T3xxMRgaYph1+0Rak4rPpyBdBcdr";
users.admins.terranix.publicKey = "${lib.fileContents ./sshkey.pub}";
# private key used when provisioning the machines
provisioner.privateKeyFile = toString ./sshkey;
hcloud.nixserver = {
playground = {
enable = true;
serverType = "cx31";
configurationFile = pkgs.writeText "configuration.nix" ''
{ pkgs, lib, config, ... }:
{ }
'';
};
};
hcloud.export.nix = null;
}

@@ -1,46 +0,0 @@
{ pkgs ? import <nixpkgs> { } }:
let
terranix = pkgs.callPackage (pkgs.fetchgit {
url = "https://github.com/mrVanDalo/terranix.git";
rev = "dfbf4d1fae08da8052ff880c5d02b2eb5857d54c";
sha256 = "1qilbvldlq7ybxa3yx99hb8vbmj0sk5x9qqxa4f1czpzj2mja0fn";
}) { };
terraform = pkgs.writers.writeBashBin "terraform" ''
export TF_VAR_hcloud_api_token=`${pkgs.pass}/bin/pass development/hetzner.com/api-token`
${pkgs.terraform_0_12}/bin/terraform "$@"
'';
in pkgs.mkShell {
buildInputs = [
terranix
terraform
(pkgs.writers.writeBashBin "prepare" ''
set -e
set -o pipefail
${pkgs.openssh}/bin/ssh-keygen -P "" -f ${toString ./.}/sshkey
'')
(pkgs.writers.writeBashBin "build" ''
set -e
set -o pipefail
${terranix}/bin/terranix | ${pkgs.jq}/bin/jq '.' > config.tf.json
${terraform}/bin/terraform init
${terraform}/bin/terraform apply
'')
(pkgs.writers.writeBashBin "cleanup" ''
${terraform}/bin/terraform destroy
rm ${toString ./.}/config.tf.json
rm ${toString ./.}/sshkey
rm ${toString ./.}/sshkey.pub
rm ${toString ./.}/terraform.tfstate*
'')
];
}