Big data configuration
parent 43f698a329
commit 72df315b4d

10 changed files with 634 additions and 2 deletions

nixos/thor.nix (138 changed lines)
@@ -3,6 +3,46 @@
# and in the NixOS manual (accessible by running ‘nixos-help’).

{ config, pkgs, lib, ... }:

let sparkConfDir = pkgs.stdenv.mkDerivation {
  name = "spark-config";
  dontUnpack = true;
  installPhase = ''
    # source standard environment
    . $stdenv/setup

    # shorthands
    base_conf=${pkgs.spark}/lib/${pkgs.spark.untarDir}/conf/

    # create output dirs for new derivation
    mkdir -p $out/

    # link unchanged template files from the original Spark conf dir
    for f in $base_conf/*.template ; do
      ln -sf $f $out/
    done

    # change selected files
    cp $out/log4j.properties{.template,}

    cat > $out/spark-env.sh <<- STOP
      export JAVA_HOME="${pkgs.jdk8}"
      export SPARK_HOME="${pkgs.spark}/lib/${pkgs.spark.untarDir}"
      export SPARK_DIST_CLASSPATH=$(${pkgs.hadoop}/bin/hadoop classpath)
      export PYSPARK_PYTHON="${pkgs.python3Packages.python}/bin/${pkgs.python3Packages.python.executable}"
      export PYTHONPATH="\$PYTHONPATH:$PYTHONPATH"
      export SPARKR_R_SHELL="${pkgs.R}/bin/R"
      export PATH="\$PATH:${pkgs.R}/bin"
    STOP

    cat > $out/spark-defaults.conf <<- STOP
      spark.eventLog.enabled true
      spark.eventLog.dir hdfs://localhost:/logs/spark
      spark.history.fs.logDirectory hdfs://localhost:/logs/spark
    STOP
  '';
};
in
{
  imports = [
    <nixos-hardware/common/cpu/amd>
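A note on the heredocs above: the STOP delimiters are unquoted, so the command substitution for SPARK_DIST_CLASSPATH runs while the derivation builds and the Hadoop classpath is baked into the generated file, whereas the escaped \$PATH and \$PYTHONPATH survive as runtime references. Roughly what the generated spark-env.sh ends up containing (store paths are illustrative, not real hashes; not part of the commit):

  export JAVA_HOME="/nix/store/…-openjdk-8u…"
  export SPARK_DIST_CLASSPATH=/nix/store/…-hadoop-…/etc/hadoop:/nix/store/…   # expanded at build time
  export PYTHONPATH="$PYTHONPATH:…"                                           # \$ kept the runtime reference
  export PATH="$PATH:/nix/store/…-R-…/bin"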
@@ -102,7 +142,7 @@
      };
    };
    clamav = { daemon.enable = true; updater.enable = true; };
-   dbus.packages = with pkgs; [ gnome.dconf ];
+   dbus.packages = with pkgs; [ pkgs.dconf ];
    gnome.gnome-keyring.enable = true;
    gvfs = { enable = true; package = pkgs.gnome3.gvfs; };
    fwupd.enable = true;
@@ -154,11 +194,104 @@
    };
    # gnome.gnome-remote-desktop.enable = true;
    zerotierone = { enable = true; joinNetworks = [ "8056c2e21cf9c753" ]; };

    spark = {
      master = {
        enable = true;
        restartIfChanged = true;
      };
      worker = {
        enable = true;
        restartIfChanged = true;
      };
      confDir = sparkConfDir;
    };
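    # Standalone master and worker both run on this host; confDir points them at
    # the generated spark-env.sh / spark-defaults.conf above.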

    hadoop = {
      coreSite = {
        "fs.defaultFS" = "hdfs://localhost:8020";
      };
      hdfsSite = {
        "dfs.namenode.rpc-bind-host" = "0.0.0.0";
        "dfs.permissions" = "false";
      };

      hdfs = {
        namenode = {
          enable = true;
          formatOnInit = true;
          restartIfChanged = true;
        };
        datanode = {
          enable = true;
          restartIfChanged = true;
        };
        journalnode = {
          enable = true;
          restartIfChanged = true;
        };
        zkfc = {
          enable = true;
          restartIfChanged = true;
        };
        httpfs = {
          enable = true;
          restartIfChanged = true;
        };
      };
      yarn = {
        resourcemanager.enable = true;
        nodemanager.enable = true;
      };
    };
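    # Single-node HDFS/YARN: formatOnInit formats the namenode on its first start,
    # and the hdfs://localhost URIs in spark-defaults.conf resolve to this namenode.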

    ethminer = {
      enable = false;
      wallet = "0x73b788882e1C182123333f42FFf275B7dd7f51bb";
      toolkit = "opencl";
      rig = "thor";
      pool = "eth-eu1.nanopool.org";
      stratumPort = 9999;

      registerMail = "";
    };

    # teamviewer.enable = true;
  };

  systemd.services.spark-history = {
    path = with pkgs; [ procps openssh nettools ];
    description = "spark history service.";
    after = [ "network.target" ];
    wantedBy = [ "multi-user.target" ];
    restartIfChanged = true;
    environment = {
      SPARK_CONF_DIR = sparkConfDir;
      SPARK_LOG_DIR = "/var/log/spark";
    };
    serviceConfig = {
      Type = "forking";
      User = "spark";
      Group = "spark";
      WorkingDirectory = "${pkgs.spark}/lib/${pkgs.spark.untarDir}";
      ExecStart = "${pkgs.spark}/lib/${pkgs.spark.untarDir}/sbin/start-history-server.sh";
      ExecStop = "${pkgs.spark}/lib/${pkgs.spark.untarDir}/sbin/stop-history-server.sh";
      TimeoutSec = 300;
      StartLimitBurst = 10;
      Restart = "always";
    };
  };
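  # SPARK_CONF_DIR hands the history server the generated config, so it serves
  # the event logs that jobs write to hdfs://localhost:/logs/spark.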

  services.teamviewer.enable = true;

  security = {
    pam.services."kde" = {
      enableKwallet = true;
    };
    rtkit.enable = true;
    sudo.extraConfig = ''
      Defaults pwfeedback
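The generated spark-defaults.conf points spark.eventLog.dir and the history server at hdfs://localhost:/logs/spark, but nothing in this file creates that directory. A minimal sketch of a one-time setup once the namenode is running (plain hadoop CLI, not part of the commit; with dfs.permissions set to false above the chown is mostly cosmetic):

  hdfs dfs -mkdir -p /logs/spark
  hdfs dfs -chown spark /logs/spark   # spark-history runs as the spark user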
@@ -187,7 +320,8 @@
    allowUnfree = true;
    packageOverrides = pkgs: {
      steam = pkgs.steam.override {
-       extraPkgs = pkgs: with pkgs; [ icu ];
+       extraPkgs = pkgs: with pkgs; [ ];
+       extraLibraries = pkgs: with pkgs; [ fontconfig.lib icu freetype ];
      };
    };
    # cudaSupport = true;
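After switching to this configuration, the new units can be sanity-checked from a shell. The hdfs-*, yarn-* and spark-* unit names are assumptions based on the usual NixOS module naming; spark-history is the unit defined explicitly above:

  sudo nixos-rebuild switch
  systemctl --no-pager status spark-master spark-worker spark-history
  systemctl --no-pager status hdfs-namenode hdfs-datanode yarn-resourcemanager yarn-nodemanager
  hdfs dfsadmin -report   # confirms the datanode registered with the namenode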