David Arnold 2021-05-13 17:27:08 -04:00
parent c3fa598922
commit db6d83c61e
GPG key ID: 6D6A936E69C59D08
53 changed files with 1916 additions and 1599 deletions

@@ -3,13 +3,15 @@
 with lib;
 {
 
-  copyDockerImages = { images, dest, args ? "" }:
-    pkgs.writeScript "copy-docker-images.sh" (concatMapStrings (image: ''
-      #!${pkgs.runtimeShell}
+  copyDockerImages = { images, dest, args ? "" }:
+    pkgs.writeScript "copy-docker-images.sh" (concatMapStrings
+      (image: ''
+        #!${pkgs.runtimeShell}
 
-      set -e
+        set -e
 
-      echo "copying '${image.imageName}:${image.imageTag}' to '${dest}/${image.imageName}:${image.imageTag}'"
-      ${pkgs.skopeo}/bin/skopeo copy ${args} $@ docker-archive:${image} ${dest}/${image.imageName}:${image.imageTag}
-    '') images);
+        echo "copying '${image.imageName}:${image.imageTag}' to '${dest}/${image.imageName}:${image.imageTag}'"
+        ${pkgs.skopeo}/bin/skopeo copy ${args} $@ docker-archive:${image} ${dest}/${image.imageName}:${image.imageTag}
+      '')
+      images);
 }
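
For orientation, a minimal usage sketch of the copyDockerImages helper above; the import path, image, and registry below are placeholders rather than anything defined in this commit:

  let
    pkgs = import <nixpkgs> { };
    # assumed entry point; the library's actual import path is not shown here
    kubenixLib = import ./lib.nix { inherit pkgs; inherit (pkgs) lib; };
    # dockerTools images expose imageName and imageTag, which the script interpolates
    image = pkgs.dockerTools.buildImage { name = "hello"; tag = "latest"; };
  in
  kubenixLib.copyDockerImages {
    images = [ image ];
    # any skopeo destination prefix; "<imageName>:<imageTag>" is appended by the script
    dest = "docker://registry.example.com";
  }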

@@ -21,31 +21,39 @@ rec {
     else mkOverride priority value;
 
-  loadYAML = path: importJSON (pkgs.runCommand "yaml-to-json" {
-  } "${pkgs.remarshal}/bin/remarshal -i ${path} -if yaml -of json > $out");
+  loadYAML = path: importJSON (pkgs.runCommand "yaml-to-json"
+    { } "${pkgs.remarshal}/bin/remarshal -i ${path} -if yaml -of json > $out");
 
-  toYAML = config: builtins.readFile (pkgs.runCommand "to-yaml" {
-    buildInputs = [pkgs.remarshal];
-  } ''
+  toYAML = config: builtins.readFile (pkgs.runCommand "to-yaml"
+    {
+      buildInputs = [ pkgs.remarshal ];
+    } ''
     remarshal -i ${pkgs.writeText "to-json" (builtins.toJSON config)} -if json -of yaml > $out
   '');
 
-  toMultiDocumentYaml = name: documents: pkgs.runCommand name {
-    buildInputs = [ pkgs.remarshal ];
-  } (concatMapStringsSep "\necho --- >> $out\n" (d:
-    "remarshal -i ${builtins.toFile "doc" (builtins.toJSON d)} -if json -of yaml >> $out"
-  ) documents);
+  toMultiDocumentYaml = name: documents: pkgs.runCommand name
+    {
+      buildInputs = [ pkgs.remarshal ];
+    }
+    (concatMapStringsSep "\necho --- >> $out\n"
+      (d:
+        "remarshal -i ${builtins.toFile "doc" (builtins.toJSON d)} -if json -of yaml >> $out"
+      )
+      documents);
 
   toBase64 = value:
     builtins.readFile
-      (pkgs.runCommand "value-to-b64" {} "echo -n '${value}' | ${pkgs.coreutils}/bin/base64 -w0 > $out");
+      (pkgs.runCommand "value-to-b64" { } "echo -n '${value}' | ${pkgs.coreutils}/bin/base64 -w0 > $out");
 
   exp = base: exp: foldr (value: acc: acc * base) 1 (range 1 exp);
 
-  octalToDecimal = value: (foldr (char: acc: {
-    i = acc.i + 1;
-    value = acc.value + (toInt char) * (exp 8 acc.i);
-  }) {i = 0; value = 0;} (stringToCharacters value)).value;
+  octalToDecimal = value: (foldr
+    (char: acc: {
+      i = acc.i + 1;
+      value = acc.value + (toInt char) * (exp 8 acc.i);
+    })
+    { i = 0; value = 0; }
+    (stringToCharacters value)).value;
 
   submoduleWithSpecialArgs = opts: specialArgs:
     let
@@ -58,13 +66,15 @@ rec {
     merge = loc: defs:
       let
        coerce = def: if isFunction def then def else { config = def; };
-        modules = opts' ++ map (def: { _file = def.file; imports = [(coerce def.value)]; }) defs;
-      in (evalModules {
+        modules = opts' ++ map (def: { _file = def.file; imports = [ (coerce def.value) ]; }) defs;
+      in
+      (evalModules {
        inherit modules specialArgs;
        prefix = loc;
      }).config;
     getSubOptions = prefix: (evalModules
-      { modules = opts'; inherit prefix specialArgs;
+      {
+        modules = opts'; inherit prefix specialArgs;
        # This is a work-around due to the fact that some sub-modules,
        # such as the one included in an attribute set, expects a "args"
        # attribute to be given to the sub-module. As the option
@@ -87,16 +97,19 @@ rec {
     functor = (defaultFunctor name) // {
       # Merging of submodules is done as part of mergeOptionDecls, as we have to annotate
       # each submodule with its location.
-      payload = [];
-      binOp = lhs: rhs: [];
+      payload = [ ];
+      binOp = lhs: rhs: [ ];
     };
   };
 
-  coerceListOfSubmodulesToAttrs = submodule: keyFn: let
+  coerceListOfSubmodulesToAttrs = submodule: keyFn:
+    let
       mergeValuesByFn = keyFn: values:
-        listToAttrs (map (value:
-          nameValuePair (toString (keyFn value)) value
-        ) values);
+        listToAttrs (map
+          (value:
+            nameValuePair (toString (keyFn value)) value
+          )
+          values);
 
       # Either value of type `finalType` or `coercedType`, the latter is
       # converted to `finalType` using `coerceFunc`.
@@ -113,14 +126,16 @@ rec {
           else
             let coerced = coerceFunc val; in assert finalType.check coerced; coerced;
-        in finalType.merge loc (map (def: def // { value = coerceVal def.value; }) defs);
+        in
+        finalType.merge loc (map (def: def // { value = coerceVal def.value; }) defs);
       getSubOptions = finalType.getSubOptions;
       getSubModules = finalType.getSubModules;
       substSubModules = m: coercedTo coercedType coerceFunc (finalType.substSubModules m);
       typeMerge = t1: t2: null;
       functor = (defaultFunctor name) // { wrapped = finalType; };
     };
 
-  in coercedTo
+  in
+  coercedTo
     (types.listOf (types.submodule submodule))
     (mergeValuesByFn keyFn)
     (types.attrsOf (types.submodule submodule));
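
As a quick sanity check on the arithmetic and encoding helpers above (exp, octalToDecimal and toBase64 are shown in full in this file):

  exp 8 3                # 8 * 8 * 8 = 512
  octalToDecimal "755"   # 7*64 + 5*8 + 5 = 493
  octalToDecimal "0644"  # 420, e.g. for a Kubernetes defaultMode field
  toBase64 "admin"       # "YWRtaW4=" (realised via a derivation, not pure evaluation)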

@@ -13,19 +13,22 @@ with lib;
 , namespace ? null
   # values to pass to chart
-, values ? {}
+, values ? { }
   # kubernetes version to template chart for
-, kubeVersion ? null }: let
+, kubeVersion ? null
+}:
+let
   valuesJsonFile = builtins.toFile "${name}-values.json" (builtins.toJSON values);
-in stdenvNoCC.mkDerivation {
+in
+stdenvNoCC.mkDerivation {
   name = "${name}.json";
   buildCommand = ''
     # template helm file and write resources to yaml
     helm template "${name}" \
       ${optionalString (kubeVersion != null) "--api-versions ${kubeVersion}"} \
       ${optionalString (namespace != null) "--namespace ${namespace}"} \
-      ${optionalString (values != {}) "-f ${valuesJsonFile}"} \
+      ${optionalString (values != { }) "-f ${valuesJsonFile}"} \
       ${chart} >resources.yaml
 
     # split multy yaml file into multiple files
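
An illustrative call to the chart2json wrapper whose argument defaults changed above; the chart and values below are placeholders (a chart fetched with the fetchhelm helper shown further down would fit):

  chart2json {
    name = "postgres";
    chart = postgresql-chart;              # e.g. a fetchhelm result, as in the example default.nix
    namespace = "databases";               # forwarded as --namespace
    values = { fullnameOverride = "pg"; }; # serialized to JSON and passed with -f
  }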

@@ -1,9 +1,9 @@
 { stdenvNoCC, lib, kubernetes-helm, cacert }:
 
 let
-  cleanName = name: lib.replaceStrings ["/"] ["-"] name;
-in {
+  cleanName = name: lib.replaceStrings [ "/" ] [ "-" ] name;
+in
+{
   # name of the chart
   chart
@@ -13,20 +13,21 @@ in {
 
   # version of the chart
 , version ? null
 
-# chart hash
+  # chart hash
 , sha256
 
-# whether to extract chart
+  # whether to extract chart
 , untar ? true
 
-# use custom charts repo
+  # use custom charts repo
 , repo ? null
 
-# pass --verify to helm chart
+  # pass --verify to helm chart
 , verify ? false
 
-# pass --devel to helm chart
-, devel ? false }: stdenvNoCC.mkDerivation {
+  # pass --devel to helm chart
+, devel ? false
+}: stdenvNoCC.mkDerivation {
   name = "${cleanName chart}-${if version == null then "dev" else version}";
   buildCommand = ''

@@ -1,9 +1,9 @@
-{ pkgs ? import <nixpkgs> {} }:
+{ pkgs ? import <nixpkgs> { } }:
 let
-  fetchhelm = pkgs.callPackage ./fetchhelm.nix { };
-  chart2json = pkgs.callPackage ./chart2json.nix { };
-in rec {
+  fetchhelm = pkgs.callPackage ./fetchhelm.nix { };
+  chart2json = pkgs.callPackage ./chart2json.nix { };
+in
+rec {
   postgresql-chart = fetchhelm {
     chart = "stable/postgresql";
     version = "0.18.1";

@@ -4,7 +4,7 @@ with lib;
 
 rec {
   # TODO: refactor with mkOptionType
-  mkSecretOption = {description ? "", default ? {}, allowNull ? true}: mkOption {
+  mkSecretOption = { description ? "", default ? { }, allowNull ? true }: mkOption {
     inherit description;
     type = (if allowNull then types.nullOr else id) (types.submodule {
       options = {
@@ -24,7 +24,7 @@
         }));
       };
     });
-    default = if default == null then null else {};
+    default = if default == null then null else { };
   };
 
   secretToEnv = value: {
@@ -34,7 +34,7 @@ rec {
   };
 
   # Creates kubernetes list from a list of kubernetes objects
-  mkList = { items, labels ? {} }: {
+  mkList = { items, labels ? { } }: {
     kind = "List";
     apiVersion = "v1";
@@ -42,19 +42,23 @@
   };
 
   # Creates hashed kubernetes list from a list of kubernetes objects
-  mkHashedList = { items, labels ? {} }: let
-    hash = builtins.hashString "sha1" (builtins.toJSON items);
+  mkHashedList = { items, labels ? { } }:
+    let
+      hash = builtins.hashString "sha1" (builtins.toJSON items);
 
-    labeledItems = map (item: recursiveUpdate item {
-      metadata.labels."kubenix/hash" = hash;
-    }) items;
+      labeledItems = map
+        (item: recursiveUpdate item {
+          metadata.labels."kubenix/hash" = hash;
+        })
+        items;
 
-  in mkList {
-    items = labeledItems;
-    labels = {
-      "kubenix/hash" = hash;
-    } // labels;
-  };
+    in
+    mkList {
+      items = labeledItems;
+      labels = {
+        "kubenix/hash" = hash;
+      } // labels;
+    };
 
   toBase64 = lib.toBase64;
   octalToDecimal = lib.octalToDecimal;
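
A sketch of how the reindented mkHashedList reads at a call site; the ConfigMap item below is made up for illustration:

  mkHashedList {
    items = [
      { apiVersion = "v1"; kind = "ConfigMap"; metadata.name = "demo"; data.greeting = "hello"; }
    ];
    labels = { app = "demo"; };
  }
  # every item gains a metadata.labels."kubenix/hash" label (sha1 of the serialized items),
  # and the same hash plus the extra labels end up on the resulting "kind: List" object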