add conversion from TF

Signed-off-by: Kiara Grouwstra <kiara@procolix.eu>
This commit is contained in:
Kiara Grouwstra 2025-11-10 23:18:52 +01:00
parent f1c8b35dd7
commit d7dbe144ae
Signed by: kiara
SSH key fingerprint: SHA256:COspvLoLJ5WC5rFb9ZDe5urVCkK4LJZOsjfF4duRJFU
5 changed files with 638 additions and 0 deletions

View file

@ -31,3 +31,9 @@ jobs:
steps:
- uses: actions/checkout@v4
- run: nix-shell --run 'nix-unit ./deployment/data-model-test.nix'
check-tf-conversion:
runs-on: native
steps:
- uses: actions/checkout@v4
- run: nix-shell --run 'nix-unit ./deployment/tf-conversion-test.nix'

View file

@ -0,0 +1,364 @@
# nix-unit tests for the TF (OpenTofu/Terraform) schema conversion helpers in
# ./tf-conversion.nix: wrapping provider schemas into tfvars-style `variable`
# declarations (wrapTf*), and converting them into nixpkgs module-system
# option types (fromTf*).
let
  inherit (import ../default.nix { }) pkgs;
  inherit (pkgs.callPackage ./utils.nix { }) cast evalOption;
  inherit (pkgs.callPackage ./tf-conversion.nix { })
    wrapTfType
    wrapTfAttr
    wrapTfAttrs
    wrapTfSourceSchemas
    wrapTfProvider
    wrapTfProviderSchema
    extractProviderSchemas
    tfAttrType
    fromTfTypes
    fromTfAttr
    fromTfAttrs
    fromTfSourceSchemas
    fromTfProvider
    fromTfProviderSchema
    ;
  inherit (pkgs) lib;
  inherit (lib)
    mkOption
    types
    ;
  inherit (types)
    submodule
    listOf
    str
    ;
  # when true, sensitive TF attributes (like `program` below) are kept by the
  # fromTf* conversions instead of being filtered out
  allowSensitive = true;
  # example attributes as obtained by a command like `tofu providers schema -json | jq '.provider_schemas["registry.opentofu.org/hashicorp/external"].data_source_schemas.external.block.attributes'`
  tfAttrs = {
    # computed attribute: a provider OUTPUT — conversions are expected to drop it
    id = {
      type = "string";
      description = "The id of the data source.";
      description_kind = "plain";
      computed = true;
    };
    # required + sensitive input
    program = {
      type = [
        "list"
        "string"
      ];
      description = "A list of strings.";
      description_kind = "plain";
      required = true;
      sensitive = true;
    };
    # optional + deprecated input: wrapTf* keeps it (with a null default),
    # fromTf* filters it out
    query = {
      type = [
        "map"
        "string"
      ];
      deprecated = true;
      description = "A map of string values.";
      description_kind = "plain";
      optional = true;
    };
  };
  converted = fromTfAttrs allowSensitive tfAttrs;
in
{
  _class = "nix-unit";
  # nested TF type lists render as nested HCL type calls
  test-wrapTfType = {
    expr = wrapTfType [
      "list"
      "string"
    ];
    expected = "list(string)";
  };
  # description_kind/required are stripped; sensitive is preserved
  test-wrapTfAttr = {
    expr = wrapTfAttr tfAttrs.program;
    expected = {
      type = "list(string)";
      description = "A list of strings.";
      sensitive = true;
    };
  };
  # the computed `id` attribute is dropped; optional `query` gains default = null
  test-wrapTfAttrs = {
    expr = wrapTfAttrs tfAttrs;
    expected.variable = {
      program = {
        description = "A list of strings.";
        sensitive = true;
        type = "list(string)";
      };
      query = {
        deprecated = true;
        description = "A map of string values.";
        default = null;
        type = "map(string)";
      };
    };
  };
  test-wrapTfSourceSchemas = {
    expr = wrapTfSourceSchemas { external.block.attributes = tfAttrs; };
    expected.external.variable = {
      program = {
        description = "A list of strings.";
        sensitive = true;
        type = "list(string)";
      };
      query = {
        deprecated = true;
        description = "A map of string values.";
        default = null;
        type = "map(string)";
      };
    };
  };
  test-wrapTfProvider = {
    expr = wrapTfProvider { data_source_schemas.external.block.attributes = tfAttrs; };
    expected.data_source_schemas.external.variable = {
      program = {
        description = "A list of strings.";
        sensitive = true;
        type = "list(string)";
      };
      query = {
        deprecated = true;
        description = "A map of string values.";
        default = null;
        type = "map(string)";
      };
    };
  };
  # the "registry.opentofu.org/" prefix is stripped from provider keys
  test-wrapTfProviderSchema = {
    expr = wrapTfProviderSchema {
      provider_schemas."registry.opentofu.org/hashicorp/external".data_source_schemas.external.block.attributes =
        tfAttrs;
    };
    expected."hashicorp/external".data_source_schemas.external.variable = {
      program = {
        description = "A list of strings.";
        sensitive = true;
        type = "list(string)";
      };
      query = {
        deprecated = true;
        description = "A map of string values.";
        default = null;
        type = "map(string)";
      };
    };
  };
  # casting through tfAttrType fills in the boolean flags' defaults
  test-tfAttrType = {
    expr = cast tfAttrType {
      type = [
        "map"
        "string"
      ];
      required = true;
      description = "dummy";
    };
    expected = {
      computed = false;
      deprecated = false;
      description = "dummy";
      description_kind = "plain";
      optional = false;
      required = true;
      sensitive = false;
      type = [
        "map"
        "string"
      ];
    };
  };
  # option types have no useful equality, so compare their descriptions
  test-fromTfTypes = {
    expr =
      (fromTfTypes [
        "list"
        "string"
      ]).description;
    expected = (listOf str).description;
  };
  test-fromTfAttr-convert = {
    expr = (fromTfAttr tfAttrs.program).description;
    expected = "A list of strings.";
  };
  # a converted option should accept a value matching its TF type
  test-fromTfAttr-fit = {
    expr = evalOption (fromTfAttr tfAttrs.program) [
      "echo"
      "123"
    ];
    expected = [
      "echo"
      "123"
    ];
  };
  # only `program` survives: `id` is computed and `query` is deprecated
  test-fromTfAttrs-convert = {
    expr = converted.description;
    expected =
      (submodule {
        options = {
          program = mkOption {
            type = listOf str;
          };
        };
      }).description;
  };
  test-fromTfAttrs-fit = {
    expr = cast converted { program = [ "foo" ]; };
    expected.program = [ "foo" ];
  };
  test-fromTfSourceSchemas = {
    expr = cast (fromTfSourceSchemas allowSensitive {
      external = {
        version = 0;
        block = {
          attributes = tfAttrs;
          description = "bar foo";
          description_kind = "plain";
        };
      };
    }) { external.program = [ "foo" ]; };
    expected.external.program = [ "foo" ];
  };
  test-fromTfProvider = {
    expr = cast (fromTfProvider allowSensitive {
      data_source_schemas.external = {
        version = 0;
        block = {
          attributes = tfAttrs;
          description = "bar foo";
          description_kind = "plain";
        };
      };
    }) { data_source_schemas.external.program = [ "foo" ]; };
    expected.data_source_schemas.external.program = [ "foo" ];
  };
  test-fromTfProviderSchema = {
    expr = cast (fromTfProviderSchema allowSensitive {
      provider_schemas."registry.opentofu.org/hashicorp/external".data_source_schemas.external = {
        version = 0;
        block = {
          attributes = tfAttrs;
          description = "bar foo";
          description_kind = "plain";
        };
      };
    }) { "hashicorp/external".data_source_schemas.external.program = [ "foo" ]; };
    expected."hashicorp/external".data_source_schemas.external.program = [ "foo" ];
  };
  # end-to-end: builds the provider schema derivation for the real
  # hashicorp/external provider (requires the plugin to be available) and
  # checks the raw schema, its tfvars wrapping, and a config cast through
  # the converted module type
  test-extractProviderSchemas = {
    expr =
      let
        inherit (extractProviderSchemas allowSensitive (p: [ p.external ])) schema wrapped converted;
      in
      {
        inherit schema wrapped;
        config = cast converted {
          "hashicorp/external".data_source_schemas.external = {
            program = [ "foo" ];
            query.a = "b";
            working_dir = "bar";
          };
        };
      };
    expected = {
      config."hashicorp/external".data_source_schemas.external = {
        program = [ "foo" ];
        query.a = "b";
        working_dir = "bar";
      };
      # verbatim `tofu providers schema -json` output for the external provider
      schema = {
        format_version = "1.0";
        provider_schemas = {
          "registry.opentofu.org/hashicorp/external" = {
            data_source_schemas = {
              external = {
                block = {
                  attributes = {
                    id = {
                      computed = true;
                      description = "The id of the data source. This will always be set to `-`";
                      description_kind = "plain";
                      type = "string";
                    };
                    program = {
                      description = "A list of strings, whose first element is the program to run and whose subsequent elements are optional command line arguments to the program. Terraform does not execute the program through a shell, so it is not necessary to escape shell metacharacters nor add quotes around arguments containing spaces.";
                      description_kind = "plain";
                      required = true;
                      type = [
                        "list"
                        "string"
                      ];
                    };
                    query = {
                      description = "A map of string values to pass to the external program as the query arguments. If not supplied, the program will receive an empty object as its input.";
                      description_kind = "plain";
                      optional = true;
                      type = [
                        "map"
                        "string"
                      ];
                    };
                    result = {
                      computed = true;
                      description = "A map of string values returned from the external program.";
                      description_kind = "plain";
                      type = [
                        "map"
                        "string"
                      ];
                    };
                    working_dir = {
                      description = "Working directory of the program. If not supplied, the program will run in the current directory.";
                      description_kind = "plain";
                      optional = true;
                      type = "string";
                    };
                  };
                  description = "The `external` data source allows an external program implementing a specific protocol (defined below) to act as a data source, exposing arbitrary data for use elsewhere in the Terraform configuration.\n\n**Warning** This mechanism is provided as an \"escape hatch\" for exceptional situations where a first-class Terraform provider is not more appropriate. Its capabilities are limited in comparison to a true data source, and implementing a data source via an external program is likely to hurt the portability of your Terraform configuration by creating dependencies on external programs and libraries that may not be available (or may need to be used differently) on different operating systems.\n\n**Warning** Terraform Enterprise does not guarantee availability of any particular language runtimes or external programs beyond standard shell utilities, so it is not recommended to use this data source within configurations that are applied within Terraform Enterprise.";
                  description_kind = "plain";
                };
                version = 0;
              };
            };
            provider = {
              block.description_kind = "plain";
              version = 0;
            };
          };
        };
      };
      # computed attributes (id, result) are dropped; optional ones get default = null
      wrapped."hashicorp/external".data_source_schemas.external.variable = {
        program = {
          description = "A list of strings, whose first element is the program to run and whose subsequent elements are optional command line arguments to the program. Terraform does not execute the program through a shell, so it is not necessary to escape shell metacharacters nor add quotes around arguments containing spaces.";
          type = "list(string)";
        };
        query = {
          description = "A map of string values to pass to the external program as the query arguments. If not supplied, the program will receive an empty object as its input.";
          type = "map(string)";
          default = null;
        };
        working_dir = {
          description = "Working directory of the program. If not supplied, the program will run in the current directory.";
          type = "string";
          default = null;
        };
      };
    };
  };
}

View file

@ -0,0 +1,254 @@
# Helpers for bridging TF (OpenTofu/Terraform) provider schemas and the Nix
# module system:
#  * wrapTf*    — render provider schemas as tfvars-style `variable` declarations
#  * fromTf*    — convert provider schemas into nixpkgs option types
#  * extractProviderSchemas — obtain a provider's schema JSON at build time
{
  lib,
  pkgs,
  ...
}:
let
  inherit (pkgs.callPackage ./utils.nix { }) cast;
  inherit (lib)
    filterAttrs
    flatten
    mapAttrs
    mkOption
    removeAttrs
    throwIf
    types
    ;
  inherit (types)
    attrsOf
    bool
    enum
    float
    int
    ints
    number
    submodule
    listOf
    str
    ;
  # keep only the schema sections that describe resources / data sources
  sourceSchemas = filterAttrs (
    k: _:
    lib.elem k [
      "data_source_schemas"
      "resource_schemas"
    ]
  );
in
rec {
  # helpers to obtain TF provider data into `tfvars.json` to easily wrap TF resources / data sources

  # Render a TF type (a string, or a nested list like [ "list" "string" ]) as
  # an HCL type expression, e.g. "list(string)".
  wrapTfType =
    tfType: lib.foldr (typ: acc: if acc == null then typ else "${typ}(${acc})") null (flatten tfType);

  # Turn a single TF attribute schema into a tfvars `variable` body: strip
  # metadata that HCL variables do not accept, render the type, and give
  # optional attributes a null default so callers may omit them.
  wrapTfAttr =
    { type, ... }@attr:
    removeAttrs attr [
      "description_kind"
      "computed"
      "required"
      "optional"
    ]
    // {
      type = wrapTfType type;
    }
    # NOTE(review): presence check — assumes TF schema JSON only emits
    # `optional` when it is true; verify against `tofu providers schema -json`.
    // (if (attr ? optional) then { default = null; } else { });

  # Wrap all input attributes of a block. Computed attributes are provider
  # outputs, not inputs, so they are excluded.
  wrapTfAttrs = tfAttrs: {
    variable = lib.mapAttrs (_: wrapTfAttr) (filterAttrs (_: v: !(v ? computed)) tfAttrs);
  };

  # Wrap every data-source/resource schema of one provider section.
  wrapTfSourceSchemas = mapAttrs (_: schemas: wrapTfAttrs schemas.block.attributes);

  # Wrap the resource/data-source sections of one provider's schema.
  wrapTfProvider = schema: mapAttrs (_: wrapTfSourceSchemas) (sourceSchemas schema);

  # Wrap a full `tofu providers schema -json` document, keyed by provider name
  # with the registry prefix stripped (e.g. "hashicorp/external").
  wrapTfProviderSchema =
    output:
    lib.mapAttrs' (k: v: {
      name = lib.removePrefix "registry.opentofu.org/" k;
      value = wrapTfProvider v;
    }) output.provider_schemas;

  # converting types

  # Module-system representation of one TF attribute schema; the boolean
  # flags default to false so a cast yields a fully-populated attrset.
  tfAttrType = submodule {
    options = {
      "type" = mkOption {
        type = types.either str (types.listOf str);
      };
      "description" = mkOption {
        type = str;
        # default = "";
      };
      "description_kind" = mkOption {
        type = enum [ "plain" ];
        default = "plain";
      };
      "computed" = mkOption {
        type = bool;
        default = false;
      };
      "sensitive" = mkOption {
        type = bool;
        default = false;
      };
      "required" = mkOption {
        type = bool;
        default = false;
      };
      "optional" = mkOption {
        type = bool;
        default = false;
      };
      "deprecated" = mkOption {
        type = bool;
        default = false;
      };
    };
  };
  tfAttrsType = attrsOf tfAttrType;

  # converting TF to nix modules

  # Translate a TF type (string or nested list) into a nixpkgs option type.
  # NOTE: the parameter was renamed from `types` — it shadowed `lib.types`,
  # which broke the "dynamic" branch (`types.unspecified` selected into the
  # argument list instead of lib.types).
  fromTfTypes =
    tfTypes:
    let
      typ = lib.head tfTypes;
      # lazily computed; only forced by the collection branches below,
      # so scalar types never evaluate the (empty-tail) recursion
      rest = fromTfTypes (lib.tail tfTypes);
    in
    {
      inherit bool number;
      "string" = str;
      "int32" = ints.s32;
      # FIX: was ints.s32, silently narrowing 64-bit values; Nix integers are
      # 64-bit signed, so the plain `int` type matches int64 exactly
      "int64" = int;
      "float32" = float;
      "float64" = float;
      "dynamic" = types.unspecified;
      "list" = listOf rest;
      "map" = attrsOf rest;
      "set" = listOf rest; # no unordered type in nix
      object = throw "to be implemented";
      tuple = throw "to be implemented";
    }
    .${typ};

  # Convert one TF attribute into an mkOption. Computed attributes have no
  # Nix-side input representation, and an attribute must be declared either
  # required or optional — anything else is a schema error.
  fromTfAttr =
    tfAttr:
    let
      inherit (cast tfAttrType tfAttr)
        type
        description
        computed
        optional
        required
        ;
      # normalize "string" vs [ "list" "string" ] to a flat list for fromTfTypes
      flatTypes = flatten type;
    in
    throwIf computed "computed TF attributes cannot be translated to Nix" mkOption (
      (
        if optional then
          { default = null; }
        else
          throwIf (!required) "either of required or optional must be true" { }
      )
      // {
        inherit description;
        type = fromTfTypes flatTypes;
      }
    );

  # Build a submodule of options from a TF attribute set, skipping computed
  # and deprecated attributes, plus sensitive ones unless allowSensitive.
  fromTfAttrs =
    allowSensitive: tfAttrs:
    submodule {
      options = lib.mapAttrs (_: fromTfAttr) (
        lib.filterAttrs (
          # NOTE(review): presence checks — assumes TF only emits these flags when true
          _: v: (if allowSensitive then true else !(v ? sensitive)) && !(v ? computed) && !(v ? deprecated)
        ) tfAttrs
      );
    };

  # Convert every data-source/resource schema of a provider section into a
  # submodule option.
  fromTfSourceSchemas =
    allowSensitive: schemas:
    submodule {
      options = mapAttrs (
        _: { block, ... }: mkOption { type = fromTfAttrs allowSensitive block.attributes; }
      ) schemas;
    };

  # Convert the resource/data-source sections of one provider schema.
  fromTfProvider =
    allowSensitive: schema:
    submodule {
      options = mapAttrs (_: schemas: mkOption { type = fromTfSourceSchemas allowSensitive schemas; }) (
        sourceSchemas schema
      );
    };

  # Convert a full `tofu providers schema -json` document, keyed by provider
  # name with the registry prefix stripped.
  fromTfProviderSchema =
    allowSensitive:
    { provider_schemas, ... }:
    submodule {
      options = lib.mapAttrs' (k: schema: {
        name = lib.removePrefix "registry.opentofu.org/" k;
        value = mkOption { type = fromTfProvider allowSensitive schema; };
      }) provider_schemas;
    };

  # extract from TF data

  # Given a plugin selector (as accepted by terraform's withPlugins), build
  # the provider schema JSON at build time and return it together with its
  # tfvars wrapping and its module-system conversion.
  # NOTE: evaluating `schema` reads a build output during evaluation
  # (import-from-derivation).
  extractProviderSchemas =
    allowSensitive: pluginFn:
    let
      tf = (pkgs.callPackage ./tf.nix { }).withPlugins pluginFn;
      usedPlugins = pluginFn tf.plugins;
      # minimal main.tf.json pinning the selected providers so `tofu init`
      # resolves exactly the pre-fetched plugins
      mainTf = pkgs.writers.writeJSON "main.tf.json" {
        terraform.required_providers = lib.listToAttrs (
          lib.lists.map (
            {
              meta,
              owner,
              version,
              ...
            }:
            let
              # the provider name is the last path component of its homepage URL
              name = lib.last (lib.splitString "/" meta.homepage);
            in
            {
              inherit name;
              value = {
                source = "${owner}/${name}";
                version = "= ${version}";
              };
            }
          ) usedPlugins
        );
      };
      # derivation that runs `tofu providers schema -json` and keeps the result
      schemas = pkgs.stdenv.mkDerivation {
        name = "tf-extract";
        src = pkgs.linkFarm "tf-providers-main" [
          {
            name = "main.tf.json";
            path = mainTf;
          }
        ];
        buildInputs = [
          tf
          pkgs.jq
        ];
        buildPhase = ''
          tofu init 1>/dev/null
          tofu providers schema -json | jq . > ./schemas.json
        '';
        installPhase = ''
          mkdir -p $out/share
          cp ./schemas.json $out/share
        '';
      };
      # FIX: read the build output directly. The previous
      # `builtins.storePath (builtins.toPath ...)` relied on the deprecated
      # builtins.toPath and on storePath, which is unavailable in pure
      # evaluation mode; string interpolation already carries the context.
      schema = lib.importJSON "${schemas}/share/schemas.json";
      wrapped = wrapTfProviderSchema schema;
      converted = fromTfProviderSchema allowSensitive schema;
    in
    {
      inherit schema wrapped converted;
    };
}

View file

@ -2,6 +2,7 @@ let
inherit (import ../default.nix { }) pkgs;
inherit (pkgs.callPackage ./utils.nix { })
mapKeys
cast
evalOption
toBash
withPackages
@ -22,6 +23,17 @@ in
};
};
test-cast = {
expr = cast (submodule {
options = {
a = mkOption { default = 2; };
};
}) { };
expected = {
a = 2;
};
};
test-evalOption = {
expr = evalOption (mkOption {
type = submodule {

View file

@ -38,6 +38,8 @@ rec {
];
}).config.opt;
cast = type: evalOption (lib.mkOption { inherit type; });
toBash =
v:
lib.replaceStrings [ "\"" ] [ "\\\"" ] (