Copy the source code from https://github.com/hashicorp/hcl/tree/v2.13.0.

Change-Id: Ica3f2a3ef03f6cc12fb00c081b1f60287cda9d0b
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..9f6c23b
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,212 @@
+# HCL Changelog
+
+## v2.13.0 (June 22, 2022)
+
+### Enhancements
+
+* hcl: `hcl.Diagnostic` now has an additional field `Extra` which is intended for carrying arbitrary supporting data ("extra information") related to the diagnostic message, allowing diagnostic renderers to optionally tailor the presentation of messages for particular situations. ([#539](https://github.com/hashicorp/hcl/pull/539))
+* hclsyntax: When an error occurs during a function call, the returned diagnostics will include _extra information_ (as described in the previous point) about which function was being called and, if the message is about an error returned by the function itself, that raw `error` value without any post-processing. ([#539](https://github.com/hashicorp/hcl/pull/539))
+
+### Bugs Fixed
+
+* hclwrite: Fixed a potential data race for any situation where `hclwrite.Format` runs concurrently with itself. ([#534](https://github.com/hashicorp/hcl/pull/534))
+
+## v2.12.0 (April 22, 2022)
+
+### Enhancements
+
+* hclsyntax: Evaluation of conditional expressions will now produce more precise error messages about inconsistencies between the types of the true and false result expressions, particularly in cases where both are of the same structural type kind but differ in their nested elements. ([#530](https://github.com/hashicorp/hcl/pull/530))
+* hclsyntax: The lexer will no longer allocate a small object on the heap for each token. Instead, in that situation it will allocate only when needed to return a diagnostic message with source location information. ([#490](https://github.com/hashicorp/hcl/pull/490))
+* hclwrite: New functions `TokensForTuple`, `TokensForObject`, and `TokensForFunctionCall` allow for more easily constructing the three constructs which are supported for static analysis and which HCL-based languages typically use in contexts where an expression is used only for its syntax, and not evaluated to produce a real value. For example, these new functions together are sufficient to construct all valid type constraint expressions from [the Type Expressions Extension](./ext/typeexpr/), which is the basis of variable type constraints in the Terraform language at the time of writing. ([#502](https://github.com/hashicorp/hcl/pull/502))
+* json: New functions `IsJSONExpression` and `IsJSONBody` to determine if a given expression or body was created by the JSON syntax parser. In normal situations it's better not to worry about what syntax a particular expression/body originated in, but this can be useful in some trickier cases where an application needs to shim for backwards-compatibility or for static analysis that needs to have special handling of the JSON syntax's embedded expression/template conventions. ([#524](https://github.com/hashicorp/hcl/pull/524))
+
+### Bugs Fixed
+
+* gohcl: Fix docs about supported types for blocks. ([#507](https://github.com/hashicorp/hcl/pull/507))
+
+## v2.11.1 (December 1, 2021)
+
+### Bugs Fixed
+
+* hclsyntax: The type for an upgraded unknown value with a splat expression cannot be known ([#495](https://github.com/hashicorp/hcl/pull/495))
+
+## v2.11.0 (December 1, 2021)
+
+### Enhancements
+
+* hclsyntax: Various error messages related to unexpectedly reaching end of file while parsing a delimited subtree will now return specialized messages describing the opening tokens as "unclosed", instead of returning a generic diagnostic that just happens to refer to the empty source range at the end of the file. This gives better feedback when error messages are being presented alongside a source code snippet, as is common in HCL-based applications, because it shows which innermost container the parser was working on when it encountered the error. ([#492](https://github.com/hashicorp/hcl/pull/492))
+
+### Bugs Fixed
+
+* hclsyntax: Upgrading an unknown single value to a list using a splat expression must return unknown ([#493](https://github.com/hashicorp/hcl/pull/493))
+
+## v2.10.1 (July 21, 2021)
+
+* dynblock: Decode unknown dynamic blocks in order to obtain any diagnostics even though the decoded value is not used ([#476](https://github.com/hashicorp/hcl/pull/476))
+* hclsyntax: Calling functions is now more robust in the face of an incorrectly-implemented function which returns a `function.ArgError` whose argument index is out of range for the length of the arguments. Previously this would often lead to a panic, but now it'll return a less-precise error message instead. Functions that return out-of-bounds argument indices still ought to be fixed so that the resulting error diagnostics can be as precise as possible. ([#472](https://github.com/hashicorp/hcl/pull/472))
+* hclsyntax: Ensure marks on unknown values are maintained when processing string templates. ([#478](https://github.com/hashicorp/hcl/pull/478))
+* hcl: Improved error messages for various common error situations in `hcl.Index` and `hcl.GetAttr`. These are part of the implementation of indexing and attribute lookup in the native syntax expression language too, so the new error messages will apply to problems using those operators. ([#474](https://github.com/hashicorp/hcl/pull/474))
+
+## v2.10.0 (April 20, 2021)
+
+### Enhancements
+
+* dynblock,hcldec: Using dynblock in conjunction with hcldec can now decode blocks with unknown dynamic for_each arguments as entirely unknown values ([#461](https://github.com/hashicorp/hcl/pull/461))
+* hclsyntax: Some syntax errors during parsing of the inside of `${` ... `}` template interpolation sequences will now produce an extra hint message about the need to escape as `$${` when trying to include interpolation syntax for other languages like shell scripting, AWS IAM policies, etc. ([#462](https://github.com/hashicorp/hcl/pull/462))
+
+## v2.9.1 (March 10, 2021)
+
+### Bugs Fixed
+
+* hclsyntax: Fix panic for marked index value. ([#451](https://github.com/hashicorp/hcl/pull/451))
+
+## v2.9.0 (February 23, 2021)
+
+### Enhancements
+
+* HCL's native syntax and JSON scanners -- and thus all of the other parsing components that build on top of them -- are now using Unicode 13 rules for text segmentation when counting text characters for the purpose of reporting source location columns. Previously HCL was using Unicode 12. Unicode 13 still uses the same algorithm but includes some additions to the character tables the algorithm is defined in terms of, to properly categorize new characters defined in Unicode 13.
+
+## v2.8.2 (January 6, 2021)
+
+### Bugs Fixed
+
+* hclsyntax: Fix panic for marked collection splat. ([#436](https://github.com/hashicorp/hcl/pull/436))
+* hclsyntax: Fix panic for marked template loops. ([#437](https://github.com/hashicorp/hcl/pull/437))
+* hclsyntax: Fix `for` expression marked conditional. ([#438](https://github.com/hashicorp/hcl/pull/438))
+* hclsyntax: Mark objects with keys that are sensitive. ([#440](https://github.com/hashicorp/hcl/pull/440))
+
+## v2.8.1 (December 17, 2020)
+
+### Bugs Fixed
+
+* hclsyntax: Fix panic when expanding marked function arguments. ([#429](https://github.com/hashicorp/hcl/pull/429))
+* hclsyntax: Error when attempting to use a marked value as an object key. ([#434](https://github.com/hashicorp/hcl/pull/434))
+* hclsyntax: Error when attempting to use a marked value as an object key in expressions. ([#433](https://github.com/hashicorp/hcl/pull/433))
+
+## v2.8.0 (December 7, 2020)
+
+### Enhancements
+
+* hclsyntax: Expression grouping parentheses will now be reflected by an explicit node in the AST, whereas before they were only considered during parsing. ([#426](https://github.com/hashicorp/hcl/pull/426))
+
+### Bugs Fixed
+
+* hclwrite: The parser will now correctly include the `(` and `)` tokens when an expression is surrounded by parentheses. Previously it would incorrectly recognize those tokens as being extraneous tokens outside of the expression. ([#426](https://github.com/hashicorp/hcl/pull/426))
+* hclwrite: The formatter will now remove (rather than insert) spaces between the `!` (unary boolean "not") operator and its subsequent operand. ([#403](https://github.com/hashicorp/hcl/pull/403))
+* hclsyntax: Unmark conditional values in expressions before checking their truthfulness ([#427](https://github.com/hashicorp/hcl/pull/427))
+
+## v2.7.2 (November 30, 2020)
+
+### Bugs Fixed
+
+* gohcl: Fix panic when decoding into type containing value slices. ([#335](https://github.com/hashicorp/hcl/pull/335))
+* hclsyntax: The unusual expression `null[*]` was previously always returning an unknown value, even though the rules for `[*]` normally call for it to return an empty tuple when applied to a null. As well as being a surprising result, it was particularly problematic because it violated the rule that a calling application may assume that an expression result will always be known unless the application itself introduces unknown values via the evaluation context. `null[*]` will now produce an empty tuple. ([#416](https://github.com/hashicorp/hcl/pull/416))
+* hclsyntax: Fix panic when traversing a list, tuple, or map with cty "marks" ([#424](https://github.com/hashicorp/hcl/pull/424))
+
+## v2.7.1 (November 18, 2020)
+
+### Bugs Fixed
+
+* hclwrite: Correctly handle blank quoted string block labels, instead of dropping them ([#422](https://github.com/hashicorp/hcl/pull/422))
+
+## v2.7.0 (October 14, 2020)
+
+### Enhancements
+
+* json: There is a new function `ParseWithStartPos`, which allows overriding the starting position for parsing in case the given JSON bytes are a fragment of a larger document, such as might happen when decoding with `encoding/json` into a `json.RawMessage`. ([#389](https://github.com/hashicorp/hcl/pull/389))
+* json: There is a new function `ParseExpression`, which allows parsing a JSON string directly in expression mode, whereas previously it was only possible to parse a JSON string in body mode. ([#381](https://github.com/hashicorp/hcl/pull/381))
+* hclwrite: `Block` type now supports `SetType` and `SetLabels`, allowing surgical changes to the type and labels of an existing block without having to reconstruct the entire block. ([#340](https://github.com/hashicorp/hcl/pull/340))
+
+### Bugs Fixed
+
+* hclsyntax: Fix confusing error message for bitwise OR operator ([#380](https://github.com/hashicorp/hcl/pull/380))
+* hclsyntax: Several bug fixes for using HCL with values containing cty "marks" ([#404](https://github.com/hashicorp/hcl/pull/404), [#406](https://github.com/hashicorp/hcl/pull/406), [#407](https://github.com/hashicorp/hcl/pull/407))
+
+## v2.6.0 (June 4, 2020)
+
+### Enhancements
+
+* hcldec: Add a new `Spec`, `ValidateSpec`, which allows custom validation of values at decode-time. ([#387](https://github.com/hashicorp/hcl/pull/387))
+
+### Bugs Fixed
+
+* hclsyntax: Fix panic with combination of sequences and null arguments ([#386](https://github.com/hashicorp/hcl/pull/386))
+* hclsyntax: Fix handling of unknown values and sequences ([#386](https://github.com/hashicorp/hcl/pull/386))
+
+## v2.5.1 (May 14, 2020)
+
+### Bugs Fixed
+
+* hclwrite: handle legacy dot access of numeric indexes. ([#369](https://github.com/hashicorp/hcl/pull/369))
+* hclwrite: Fix panic for dotted full splat (`foo.*`) ([#374](https://github.com/hashicorp/hcl/pull/374))
+
+## v2.5.0 (May 6, 2020)
+
+### Enhancements
+
+* hclwrite: Generate multi-line objects and maps. ([#372](https://github.com/hashicorp/hcl/pull/372))
+
+## v2.4.0 (Apr 13, 2020)
+
+### Enhancements
+
+* The Unicode data tables that HCL uses to produce user-perceived "column" positions in diagnostics and other source ranges are now updated to Unicode 12.0.0, which will cause HCL to produce more accurate column numbers for combining characters introduced to Unicode since Unicode 9.0.0.
+
+### Bugs Fixed
+
+* json: Fix panic when parsing malformed JSON. ([#358](https://github.com/hashicorp/hcl/pull/358))
+
+## v2.3.0 (Jan 3, 2020)
+
+### Enhancements
+
+* ext/tryfunc: Optional functions `try` and `can` to include in your `hcl.EvalContext` when evaluating expressions, which allow users to make decisions based on the success of expressions. ([#330](https://github.com/hashicorp/hcl/pull/330))
+* ext/typeexpr: Now has an optional function `convert` which you can include in your `hcl.EvalContext` when evaluating expressions, allowing users to convert values to specific type constraints using the type constraint expression syntax. ([#330](https://github.com/hashicorp/hcl/pull/330))
+* ext/typeexpr: A new `cty` capsule type `typeexpr.TypeConstraintType` which, when used as either a type constraint for a function parameter or as a type constraint for a `hcldec` attribute specification will cause the given expression to be interpreted as a type constraint expression rather than a value expression. ([#330](https://github.com/hashicorp/hcl/pull/330))
+* ext/customdecode: An optional extension that allows overriding the static decoding behavior for expressions either in function arguments or `hcldec` attribute specifications. ([#330](https://github.com/hashicorp/hcl/pull/330))
+* ext/customdecode: New `cty` capsule types `customdecode.ExpressionType` and `customdecode.ExpressionClosureType` which, when used as either a type constraint for a function parameter or as a type constraint for a `hcldec` attribute specification, will cause the given expression (and, for the closure type, also the `hcl.EvalContext` it was evaluated in) to be captured for later analysis, rather than immediately evaluated. ([#330](https://github.com/hashicorp/hcl/pull/330))
+
+## v2.2.0 (Dec 11, 2019)
+
+### Enhancements
+
+* hcldec: Attribute evaluation (as part of `AttrSpec` or `BlockAttrsSpec`) now captures expression evaluation metadata in any errors it produces during type conversions, allowing for better feedback in calling applications that are able to make use of this metadata when printing diagnostic messages. ([#329](https://github.com/hashicorp/hcl/pull/329))
+
+### Bugs Fixed
+
+* hclsyntax: `IndexExpr`, `SplatExpr`, and `RelativeTraversalExpr` will now report a source range that covers all of their child expression nodes. Previously they would report only the operator part, such as `["foo"]`, `[*]`, or `.foo`, which was problematic for callers using source ranges for code analysis. ([#328](https://github.com/hashicorp/hcl/pull/328))
+* hclwrite: Parser will no longer panic when the input includes index, splat, or relative traversal syntax.  ([#328](https://github.com/hashicorp/hcl/pull/328))
+
+## v2.1.0 (Nov 19, 2019)
+
+### Enhancements
+
+* gohcl: When decoding into a struct value with some fields already populated, those values will be retained if not explicitly overwritten in the given HCL body, with similar overriding/merging behavior as `json.Unmarshal` in the Go standard library.
+* hclwrite: New interface to set the expression for an attribute to be a raw token sequence, with no special processing. This has some caveats, so if you intend to use it please refer to the godoc comments. ([#320](https://github.com/hashicorp/hcl/pull/320))
+
+### Bugs Fixed
+
+* hclwrite: The `Body.Blocks` method was returning the blocks in an undefined order, rather than preserving the order of declaration in the source input. ([#313](https://github.com/hashicorp/hcl/pull/313))
+* hclwrite: The `TokensForTraversal` function (and thus in turn the `Body.SetAttributeTraversal` method) was not correctly handling index steps in traversals, and thus producing invalid results. ([#319](https://github.com/hashicorp/hcl/pull/319))
+
+## v2.0.0 (Oct 2, 2019)
+
+Initial release of HCL 2, which is a new implementation combining the HCL 1
+language with the HIL expression language to produce a single language
+supporting both nested configuration structures and arbitrary expressions.
+
+HCL 2 has an entirely new Go library API and so is _not_ a drop-in upgrade
+relative to HCL 1. It's possible to import both versions of HCL into a single
+program using Go's _semantic import versioning_ mechanism:
+
+```
+import (
+    hcl1 "github.com/hashicorp/hcl"
+    hcl2 "github.com/hashicorp/hcl/v2"
+)
+```
+
+---
+
+Prior to v2.0.0 there was not a curated changelog. Consult the git history
+from the latest v1.x.x tag for information on the changes to HCL 1.
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..82b4de9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,353 @@
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. “Contributor”
+
+     means each individual or legal entity that creates, contributes to the
+     creation of, or owns Covered Software.
+
+1.2. “Contributor Version”
+
+     means the combination of the Contributions of others (if any) used by a
+     Contributor and that particular Contributor’s Contribution.
+
+1.3. “Contribution”
+
+     means Covered Software of a particular Contributor.
+
+1.4. “Covered Software”
+
+     means Source Code Form to which the initial Contributor has attached the
+     notice in Exhibit A, the Executable Form of such Source Code Form, and
+     Modifications of such Source Code Form, in each case including portions
+     thereof.
+
+1.5. “Incompatible With Secondary Licenses”
+     means
+
+     a. that the initial Contributor has attached the notice described in
+        Exhibit B to the Covered Software; or
+
+     b. that the Covered Software was made available under the terms of version
+        1.1 or earlier of the License, but not also under the terms of a
+        Secondary License.
+
+1.6. “Executable Form”
+
+     means any form of the work other than Source Code Form.
+
+1.7. “Larger Work”
+
+     means a work that combines Covered Software with other material, in a separate
+     file or files, that is not Covered Software.
+
+1.8. “License”
+
+     means this document.
+
+1.9. “Licensable”
+
+     means having the right to grant, to the maximum extent possible, whether at the
+     time of the initial grant or subsequently, any and all of the rights conveyed by
+     this License.
+
+1.10. “Modifications”
+
+     means any of the following:
+
+     a. any file in Source Code Form that results from an addition to, deletion
+        from, or modification of the contents of Covered Software; or
+
+     b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. “Patent Claims” of a Contributor
+
+      means any patent claim(s), including without limitation, method, process,
+      and apparatus claims, in any patent Licensable by such Contributor that
+      would be infringed, but for the grant of the License, by the making,
+      using, selling, offering for sale, having made, import, or transfer of
+      either its Contributions or its Contributor Version.
+
+1.12. “Secondary License”
+
+      means either the GNU General Public License, Version 2.0, the GNU Lesser
+      General Public License, Version 2.1, the GNU Affero General Public
+      License, Version 3.0, or any later versions of those licenses.
+
+1.13. “Source Code Form”
+
+      means the form of the work preferred for making modifications.
+
+1.14. “You” (or “Your”)
+
+      means an individual or a legal entity exercising rights under this
+      License. For legal entities, “You” includes any entity that controls, is
+      controlled by, or is under common control with You. For purposes of this
+      definition, “control” means (a) the power, direct or indirect, to cause
+      the direction or management of such entity, whether by contract or
+      otherwise, or (b) ownership of more than fifty percent (50%) of the
+      outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+     Each Contributor hereby grants You a world-wide, royalty-free,
+     non-exclusive license:
+
+     a. under intellectual property rights (other than patent or trademark)
+        Licensable by such Contributor to use, reproduce, make available,
+        modify, display, perform, distribute, and otherwise exploit its
+        Contributions, either on an unmodified basis, with Modifications, or as
+        part of a Larger Work; and
+
+     b. under Patent Claims of such Contributor to make, use, sell, offer for
+        sale, have made, import, and otherwise transfer either its Contributions
+        or its Contributor Version.
+
+2.2. Effective Date
+
+     The licenses granted in Section 2.1 with respect to any Contribution become
+     effective for each Contribution on the date the Contributor first distributes
+     such Contribution.
+
+2.3. Limitations on Grant Scope
+
+     The licenses granted in this Section 2 are the only rights granted under this
+     License. No additional rights or licenses will be implied from the distribution
+     or licensing of Covered Software under this License. Notwithstanding Section
+     2.1(b) above, no patent license is granted by a Contributor:
+
+     a. for any code that a Contributor has removed from Covered Software; or
+
+     b. for infringements caused by: (i) Your and any other third party’s
+        modifications of Covered Software, or (ii) the combination of its
+        Contributions with other software (except as part of its Contributor
+        Version); or
+
+     c. under Patent Claims infringed by Covered Software in the absence of its
+        Contributions.
+
+     This License does not grant any rights in the trademarks, service marks, or
+     logos of any Contributor (except as may be necessary to comply with the
+     notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+     No Contributor makes additional grants as a result of Your choice to
+     distribute the Covered Software under a subsequent version of this License
+     (see Section 10.2) or under the terms of a Secondary License (if permitted
+     under the terms of Section 3.3).
+
+2.5. Representation
+
+     Each Contributor represents that the Contributor believes its Contributions
+     are its original creation(s) or it has sufficient rights to grant the
+     rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+     This License is not intended to limit any rights You have under applicable
+     copyright doctrines of fair use, fair dealing, or other equivalents.
+
+2.7. Conditions
+
+     Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+     Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+     All distribution of Covered Software in Source Code Form, including any
+     Modifications that You create or to which You contribute, must be under the
+     terms of this License. You must inform recipients that the Source Code Form
+     of the Covered Software is governed by the terms of this License, and how
+     they can obtain a copy of this License. You may not attempt to alter or
+     restrict the recipients’ rights in the Source Code Form.
+
+3.2. Distribution of Executable Form
+
+     If You distribute Covered Software in Executable Form then:
+
+     a. such Covered Software must also be made available in Source Code Form,
+        as described in Section 3.1, and You must inform recipients of the
+        Executable Form how they can obtain a copy of such Source Code Form by
+        reasonable means in a timely manner, at a charge no more than the cost
+        of distribution to the recipient; and
+
+     b. You may distribute such Executable Form under the terms of this License,
+        or sublicense it under different terms, provided that the license for
+        the Executable Form does not attempt to limit or alter the recipients’
+        rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+     You may create and distribute a Larger Work under terms of Your choice,
+     provided that You also comply with the requirements of this License for the
+     Covered Software. If the Larger Work is a combination of Covered Software
+     with a work governed by one or more Secondary Licenses, and the Covered
+     Software is not Incompatible With Secondary Licenses, this License permits
+     You to additionally distribute such Covered Software under the terms of
+     such Secondary License(s), so that the recipient of the Larger Work may, at
+     their option, further distribute the Covered Software under the terms of
+     either this License or such Secondary License(s).
+
+3.4. Notices
+
+     You may not remove or alter the substance of any license notices (including
+     copyright notices, patent notices, disclaimers of warranty, or limitations
+     of liability) contained within the Source Code Form of the Covered
+     Software, except that You may alter any license notices to the extent
+     required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+     You may choose to offer, and to charge a fee for, warranty, support,
+     indemnity or liability obligations to one or more recipients of Covered
+     Software. However, You may do so only on Your own behalf, and not on behalf
+     of any Contributor. You must make it absolutely clear that any such
+     warranty, support, indemnity, or liability obligation is offered by You
+     alone, and You hereby agree to indemnify every Contributor for any
+     liability incurred by such Contributor as a result of warranty, support,
+     indemnity or liability terms You offer. You may include additional
+     disclaimers of warranty and limitations of liability specific to any
+     jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+   If it is impossible for You to comply with any of the terms of this License
+   with respect to some or all of the Covered Software due to statute, judicial
+   order, or regulation then You must: (a) comply with the terms of this License
+   to the maximum extent possible; and (b) describe the limitations and the code
+   they affect. Such description must be placed in a text file included with all
+   distributions of the Covered Software under this License. Except to the
+   extent prohibited by statute or regulation, such description must be
+   sufficiently detailed for a recipient of ordinary skill to be able to
+   understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+     fail to comply with any of its terms. However, if You become compliant,
+     then the rights granted under this License from a particular Contributor
+     are reinstated (a) provisionally, unless and until such Contributor
+     explicitly and finally terminates Your grants, and (b) on an ongoing basis,
+     if such Contributor fails to notify You of the non-compliance by some
+     reasonable means prior to 60 days after You have come back into compliance.
+     Moreover, Your grants from a particular Contributor are reinstated on an
+     ongoing basis if such Contributor notifies You of the non-compliance by
+     some reasonable means, this is the first time You have received notice of
+     non-compliance with this License from such Contributor, and You become
+     compliant prior to 30 days after Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+     infringement claim (excluding declaratory judgment actions, counter-claims,
+     and cross-claims) alleging that a Contributor Version directly or
+     indirectly infringes any patent, then the rights granted to You by any and
+     all Contributors for the Covered Software under Section 2.1 of this License
+     shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+     license agreements (excluding distributors and resellers) which have been
+     validly granted by You or Your distributors under this License prior to
+     termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+   Covered Software is provided under this License on an “as is” basis, without
+   warranty of any kind, either expressed, implied, or statutory, including,
+   without limitation, warranties that the Covered Software is free of defects,
+   merchantable, fit for a particular purpose or non-infringing. The entire
+   risk as to the quality and performance of the Covered Software is with You.
+   Should any Covered Software prove defective in any respect, You (not any
+   Contributor) assume the cost of any necessary servicing, repair, or
+   correction. This disclaimer of warranty constitutes an essential part of this
+   License. No use of  any Covered Software is authorized under this License
+   except under this disclaimer.
+
+7. Limitation of Liability
+
+   Under no circumstances and under no legal theory, whether tort (including
+   negligence), contract, or otherwise, shall any Contributor, or anyone who
+   distributes Covered Software as permitted above, be liable to You for any
+   direct, indirect, special, incidental, or consequential damages of any
+   character including, without limitation, damages for lost profits, loss of
+   goodwill, work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses, even if such party shall have been
+   informed of the possibility of such damages. This limitation of liability
+   shall not apply to liability for death or personal injury resulting from such
+   party’s negligence to the extent applicable law prohibits such limitation.
+   Some jurisdictions do not allow the exclusion or limitation of incidental or
+   consequential damages, so this exclusion and limitation may not apply to You.
+
+8. Litigation
+
+   Any litigation relating to this License may be brought only in the courts of
+   a jurisdiction where the defendant maintains its principal place of business
+   and such litigation shall be governed by laws of that jurisdiction, without
+   reference to its conflict-of-law provisions. Nothing in this Section shall
+   prevent a party’s ability to bring cross-claims or counter-claims.
+
+9. Miscellaneous
+
+   This License represents the complete agreement concerning the subject matter
+   hereof. If any provision of this License is held to be unenforceable, such
+   provision shall be reformed only to the extent necessary to make it
+   enforceable. Any law or regulation which provides that the language of a
+   contract shall be construed against the drafter shall not be used to construe
+   this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+      Mozilla Foundation is the license steward. Except as provided in Section
+      10.3, no one other than the license steward has the right to modify or
+      publish new versions of this License. Each version will be given a
+      distinguishing version number.
+
+10.2. Effect of New Versions
+
+      You may distribute the Covered Software under the terms of the version of
+      the License under which You originally received the Covered Software, or
+      under the terms of any subsequent version published by the license
+      steward.
+
+10.3. Modified Versions
+
+      If you create software not governed by this License, and you want to
+      create a new license for such software, you may create and use a modified
+      version of this License if you rename the license and remove any
+      references to the name of the license steward (except to note that such
+      modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
+      If You choose to distribute Source Code Form that is Incompatible With
+      Secondary Licenses under the terms of this version of the License, the
+      notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+
+      This Source Code Form is subject to the
+      terms of the Mozilla Public License, v.
+      2.0. If a copy of the MPL was not
+      distributed with this file, You can
+      obtain one at
+      http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file, then
+You may include the notice in a location (such as a LICENSE file in a relevant
+directory) where a recipient would be likely to look for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - “Incompatible With Secondary Licenses” Notice
+
+      This Source Code Form is “Incompatible
+      With Secondary Licenses”, as defined by
+      the Mozilla Public License, v. 2.0.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..9af736c
--- /dev/null
+++ b/README.md
@@ -0,0 +1,219 @@
+# HCL
+
+HCL is a toolkit for creating structured configuration languages that are
+both human- and machine-friendly, for use with command-line tools.
+Although intended to be generally useful, it is primarily targeted
+towards devops tools, servers, etc.
+
+> **NOTE:** This is major version 2 of HCL, whose Go API is incompatible with
+> major version 1. Both versions are available for selection in Go Modules
+> projects. HCL 2 _cannot_ be imported into Go projects that are not using Go
+> Modules. For more information, see
+> [our version selection guide](https://github.com/hashicorp/hcl/wiki/Version-Selection).
+
+HCL has both a _native syntax_, intended to be pleasant to read and write for
+humans, and a JSON-based variant that is easier for machines to generate
+and parse.
+
+The HCL native syntax is inspired by [libucl](https://github.com/vstakhov/libucl),
+[nginx configuration](http://nginx.org/en/docs/beginners_guide.html#conf_structure),
+and others.
+
+It includes an expression syntax that allows basic inline computation and,
+with support from the calling application, use of variables and functions
+for more dynamic configuration languages.
+
+HCL provides a set of constructs that can be used by a calling application to
+construct a configuration language. The application defines which attribute
+names and nested block types are expected, and HCL parses the configuration
+file, verifies that it conforms to the expected structure, and returns
+high-level objects that the application can use for further processing.
+
+```go
+package main
+
+import (
+	"log"
+
+	"github.com/hashicorp/hcl/v2/hclsimple"
+)
+
+type Config struct {
+	IOMode  string        `hcl:"io_mode"`
+	Service ServiceConfig `hcl:"service,block"`
+}
+
+type ServiceConfig struct {
+	Protocol   string          `hcl:"protocol,label"`
+	Type       string          `hcl:"type,label"`
+	ListenAddr string          `hcl:"listen_addr"`
+	Processes  []ProcessConfig `hcl:"process,block"`
+}
+
+type ProcessConfig struct {
+	Type    string   `hcl:"type,label"`
+	Command []string `hcl:"command"`
+}
+
+func main() {
+	var config Config
+	err := hclsimple.DecodeFile("config.hcl", nil, &config)
+	if err != nil {
+		log.Fatalf("Failed to load configuration: %s", err)
+	}
+	log.Printf("Configuration is %#v", config)
+}
+```
+
+A lower-level API is available for applications that need more control over
+the parsing, decoding, and evaluation of configuration. For more information,
+see [the package documentation](https://pkg.go.dev/github.com/hashicorp/hcl/v2).
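+
+As a rough sketch of that lower-level flow, the configuration above could be
+parsed with `hclparse` and then decoded with `gohcl`, reusing the `Config`
+struct (and its nested types) from the earlier example; the error handling
+here is only a minimal illustration:
+
+```go
+package main
+
+import (
+	"log"
+	"os"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/gohcl"
+	"github.com/hashicorp/hcl/v2/hclparse"
+)
+
+func main() {
+	parser := hclparse.NewParser()
+
+	// Parse the file into an *hcl.File, collecting any diagnostics.
+	f, parseDiags := parser.ParseHCLFile("config.hcl")
+
+	// A diagnostic text writer renders diagnostics with source snippets.
+	wr := hcl.NewDiagnosticTextWriter(os.Stderr, parser.Files(), 80, true)
+	if parseDiags.HasErrors() {
+		wr.WriteDiagnostics(parseDiags)
+		os.Exit(1)
+	}
+
+	var config Config // the Config struct shown in the example above
+	decDiags := gohcl.DecodeBody(f.Body, nil, &config)
+	if decDiags.HasErrors() {
+		wr.WriteDiagnostics(decDiags)
+		os.Exit(1)
+	}
+	log.Printf("Configuration is %#v", config)
+}
+```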
+
+## Why?
+
+Newcomers to HCL often ask: why not JSON, YAML, etc?
+
+Whereas JSON and YAML are formats for serializing data structures, HCL is
+a syntax and API specifically designed for building structured configuration
+formats.
+
+HCL attempts to strike a compromise between generic serialization formats
+such as JSON and configuration formats built around full programming languages
+such as Ruby. HCL syntax is designed to be easily read and written by humans,
+and allows _declarative_ logic to permit its use in more complex applications.
+
+HCL is intended as a base syntax for configuration formats built
+around key-value pairs and hierarchical blocks whose structure is well-defined
+by the calling application, and this definition of the configuration structure
+allows for better error messages and more convenient definition within the
+calling application.
+
+It can't be denied that JSON is very convenient as a _lingua franca_
+for interoperability between different pieces of software. Because of this,
+HCL defines a common configuration model that can be parsed from either its
+native syntax or from a well-defined equivalent JSON structure. This allows
+configuration to be provided as a mixture of human-authored configuration
+files in the native syntax and machine-generated files in JSON.
+
+## Information Model and Syntax
+
+HCL is built around two primary concepts: _attributes_ and _blocks_. In
+native syntax, a configuration file for a hypothetical application might look
+something like this:
+
+```hcl
+io_mode = "async"
+
+service "http" "web_proxy" {
+  listen_addr = "127.0.0.1:8080"
+  
+  process "main" {
+    command = ["/usr/local/bin/awesome-app", "server"]
+  }
+
+  process "mgmt" {
+    command = ["/usr/local/bin/awesome-app", "mgmt"]
+  }
+}
+```
+
+The JSON equivalent of this configuration is the following:
+
+```json
+{
+  "io_mode": "async",
+  "service": {
+    "http": {
+      "web_proxy": {
+        "listen_addr": "127.0.0.1:8080",
+        "process": {
+          "main": {
+            "command": ["/usr/local/bin/awesome-app", "server"]
+          },
+          "mgmt": {
+            "command": ["/usr/local/bin/awesome-app", "mgmt"]
+          }
+        }
+      }
+    }
+  }
+}
+```
+
+Regardless of which syntax is used, the API within the calling application
+is the same. It can either work directly with the low-level attributes and
+blocks, for more advanced use-cases, or it can use one of the _decoder_
+packages to declaratively extract into either Go structs or dynamic value
+structures.
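+
+For the dynamic-value route, a minimal sketch using the `hcldec` decoder
+package might evaluate a body against a spec built in Go code, returning a
+`cty.Value` rather than a Go struct. The helper below is illustrative only
+and handles just a single top-level `io_mode` attribute; a body containing
+other content would produce additional diagnostics:
+
+```go
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hcldec"
+	"github.com/zclconf/go-cty/cty"
+)
+
+// decodeIOMode decodes only the io_mode attribute from the given body,
+// returning the result as a dynamic cty.Value.
+func decodeIOMode(body hcl.Body) (cty.Value, hcl.Diagnostics) {
+	spec := hcldec.ObjectSpec{
+		"io_mode": &hcldec.AttrSpec{
+			Name:     "io_mode",
+			Type:     cty.String,
+			Required: true,
+		},
+	}
+	return hcldec.Decode(body, spec, nil)
+}
+```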
+
+Attribute values can be expressions as well as just literal values:
+
+```hcl
+# Arithmetic with literals and application-provided variables
+sum = 1 + addend
+
+# String interpolation and templates
+message = "Hello, ${name}!"
+
+# Application-provided functions
+shouty_message = upper(message)
+```
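+
+Expressions like these only resolve if the calling application supplies the
+referenced variables and functions through an `hcl.EvalContext`. The sketch
+below uses placeholder values, and assumes the `upper` function comes from the
+cty standard library; the `message` reference in the last line would similarly
+need to be resolved by the application:
+
+```go
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
+	"github.com/zclconf/go-cty/cty/function/stdlib"
+)
+
+// newEvalContext provides the "addend" and "name" variables and the "upper"
+// function used by the expressions above.
+func newEvalContext() *hcl.EvalContext {
+	return &hcl.EvalContext{
+		Variables: map[string]cty.Value{
+			"addend": cty.NumberIntVal(2),
+			"name":   cty.StringVal("Ermintrude"),
+		},
+		Functions: map[string]function.Function{
+			"upper": stdlib.UpperFunc,
+		},
+	}
+}
+```
+
+The resulting context is then passed to whichever decoder is in use, for
+example as the second argument to `gohcl.DecodeBody` or the third argument to
+`hcldec.Decode`.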
+
+Although JSON syntax doesn't permit direct use of expressions, the interpolation
+syntax allows use of arbitrary expressions within JSON strings:
+
+```json
+{
+  "sum": "${1 + addend}",
+  "message": "Hello, ${name}!",
+  "shouty_message": "${upper(message)}"
+}
+```
+
+For more information, see the detailed specifications:
+
+* [Syntax-agnostic Information Model](spec.md)
+* [HCL Native Syntax](hclsyntax/spec.md)
+* [JSON Representation](json/spec.md)
+
+## Changes in 2.0
+
+Version 2.0 of HCL combines the features of HCL 1.0 with those of the
+interpolation language HIL to produce a single configuration language that
+supports arbitrary expressions.
+
+This new version has a completely new parser and Go API, with no direct
+migration path. Although the syntax is similar, the implementation takes some
+very different approaches to improve on some "rough edges" that existed with
+the original implementation and to allow for more robust error handling.
+
+It's possible to import both HCL 1 and HCL 2 into the same program using Go's
+_semantic import versioning_ mechanism:
+
+```go
+import (
+    hcl1 "github.com/hashicorp/hcl"
+    hcl2 "github.com/hashicorp/hcl/v2"
+)
+```
+
+## Acknowledgements
+
+HCL was heavily inspired by [libucl](https://github.com/vstakhov/libucl),
+by [Vsevolod Stakhov](https://github.com/vstakhov).
+
+HCL and HIL originate in [HashiCorp Terraform](https://terraform.io/),
+with the original parsers for each written by
+[Mitchell Hashimoto](https://github.com/mitchellh).
+
+The original HCL parser was ported to pure Go (from yacc) by
+[Fatih Arslan](https://github.com/fatih). The structure-related portions of
+the new native syntax parser build on that work.
+
+The original HIL parser was ported to pure Go (from yacc) by
+[Martin Atkins](https://github.com/apparentlymart). The expression-related
+portions of the new native syntax parser build on that work.
+
+HCL 2, which merged the original HCL and HIL languages into this single new
+language, builds on design and prototyping work by
+[Martin Atkins](https://github.com/apparentlymart) in
+[zcl](https://github.com/zclconf/go-zcl).
diff --git a/cmd/hcldec/README.md b/cmd/hcldec/README.md
new file mode 100644
index 0000000..963a89f
--- /dev/null
+++ b/cmd/hcldec/README.md
@@ -0,0 +1,100 @@
+# hcldec
+
+`hcldec` is a command line tool that transforms HCL input into JSON output
+using a decoding specification given by the user.
+
+This tool is intended as a "glue" tool, with use-cases like the following:
+
+* Define an HCL-based configuration format for a third-party tool that takes
+  JSON as input, and then translate the HCL configuration into JSON before
+  running the tool. (See [the `npm-package` example](examples/npm-package).)
+
+* Use HCL from languages where an HCL parser/decoder is not yet available.
+  At the time of writing, that's any language other than Go.
+
+* In particular, define an HCL-based configuration format for a shell script
+  and then use `jq` to load the result into environment variables for
+  further processing. (See [the `sh-config-file` example](examples/sh-config-file).)
+
+## Installation
+
+If you have a working Go development environment, you can install this tool
+with `go get` in the usual way:
+
+```
+$ go get -u github.com/hashicorp/hcl/v2/cmd/hcldec
+```
+
+This will install `hcldec` in `$GOPATH/bin`, which usually places it into
+your shell `PATH` so you can then run it as `hcldec`.
+
+## Usage
+
+```
+usage: hcldec --spec=<spec-file> [options] [hcl-file ...]
+  -o, --out string          write to the given file, instead of stdout
+  -s, --spec string         path to spec file (required)
+  -V, --vars json-or-file   provide variables to the given configuration file(s)
+  -v, --version             show the version number and immediately exit
+```
+
+The most important step in using `hcldec` is to write the specification that
+defines how to interpret the given configuration files and translate them
+into JSON. The following is a simple specification that creates a JSON
+object from two top-level attributes in the input configuration:
+
+```hcl
+object {
+  attr "name" {
+    type     = string
+    required = true
+  }
+  attr "is_member" {
+    type = bool
+  }
+}
+```
+
+Specification files are conventionally kept in files with a `.hcldec`
+extension. We'll call this one `example.hcldec`.
+
+With the above specification, the following input file `example.conf` is
+valid:
+
+```hcl
+name = "Raul"
+```
+
+The spec and the input file can then be provided to `hcldec` to extract a
+JSON representation:
+
+```
+$ hcldec --spec=example.hcldec example.conf
+{"name": "Raul"}
+```
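+
+Because the result is ordinary JSON on stdout, it composes naturally with
+other tools. For example, assuming `jq` is installed, a single value can be
+extracted like this:
+
+```
+$ hcldec --spec=example.hcldec example.conf | jq -r '.name'
+Raul
+```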
+
+The specification defines both how to map the input into a JSON data structure
+and what input is valid. The `required = true` setting on the `name` attribute
+allows `hcldec` to detect and raise an error when an attribute of that name
+is not provided:
+
+```
+$ hcldec --spec=example.hcldec typo.conf
+Error: Unsupported attribute
+
+  on typo.conf line 1:
+   1: namme = "Juan"
+
+An attribute named "namme" is not expected here. Did you mean "name"?
+
+Error: Missing required attribute
+
+  on typo.conf line 2:
+
+The attribute "name" is required, but no definition was found.
+```
+
+## Further Reading
+
+For more details on the `.hcldec` specification file format, see
+[the spec file documentation](spec-format.md).
diff --git a/cmd/hcldec/diags_json.go b/cmd/hcldec/diags_json.go
new file mode 100644
index 0000000..f0b4a40
--- /dev/null
+++ b/cmd/hcldec/diags_json.go
@@ -0,0 +1,101 @@
+package main
+
+import (
+	"encoding/json"
+	"io"
+
+	"github.com/hashicorp/hcl/v2"
+)
+
+type jsonDiagWriter struct {
+	w     io.Writer
+	diags hcl.Diagnostics
+}
+
+var _ hcl.DiagnosticWriter = &jsonDiagWriter{}
+
+func (wr *jsonDiagWriter) WriteDiagnostic(diag *hcl.Diagnostic) error {
+	wr.diags = append(wr.diags, diag)
+	return nil
+}
+
+func (wr *jsonDiagWriter) WriteDiagnostics(diags hcl.Diagnostics) error {
+	wr.diags = append(wr.diags, diags...)
+	return nil
+}
+
+func (wr *jsonDiagWriter) Flush() error {
+	if len(wr.diags) == 0 {
+		return nil
+	}
+
+	type PosJSON struct {
+		Line   int `json:"line"`
+		Column int `json:"column"`
+		Byte   int `json:"byte"`
+	}
+	type RangeJSON struct {
+		Filename string  `json:"filename"`
+		Start    PosJSON `json:"start"`
+		End      PosJSON `json:"end"`
+	}
+	type DiagnosticJSON struct {
+		Severity string     `json:"severity"`
+		Summary  string     `json:"summary"`
+		Detail   string     `json:"detail,omitempty"`
+		Subject  *RangeJSON `json:"subject,omitempty"`
+	}
+	type DiagnosticsJSON struct {
+		Diagnostics []DiagnosticJSON `json:"diagnostics"`
+	}
+
+	diagsJSON := make([]DiagnosticJSON, 0, len(wr.diags))
+	for _, diag := range wr.diags {
+		var diagJSON DiagnosticJSON
+
+		switch diag.Severity {
+		case hcl.DiagError:
+			diagJSON.Severity = "error"
+		case hcl.DiagWarning:
+			diagJSON.Severity = "warning"
+		default:
+			diagJSON.Severity = "(unknown)" // should never happen
+		}
+
+		diagJSON.Summary = diag.Summary
+		diagJSON.Detail = diag.Detail
+		if diag.Subject != nil {
+			diagJSON.Subject = &RangeJSON{}
+			sJSON := diagJSON.Subject
+			rng := diag.Subject
+			sJSON.Filename = rng.Filename
+			sJSON.Start.Line = rng.Start.Line
+			sJSON.Start.Column = rng.Start.Column
+			sJSON.Start.Byte = rng.Start.Byte
+			sJSON.End.Line = rng.End.Line
+			sJSON.End.Column = rng.End.Column
+			sJSON.End.Byte = rng.End.Byte
+		}
+
+		diagsJSON = append(diagsJSON, diagJSON)
+	}
+
+	src, err := json.MarshalIndent(DiagnosticsJSON{diagsJSON}, "", "  ")
+	if err != nil {
+		return err
+	}
+	_, err = wr.w.Write(src)
+	wr.w.Write([]byte{'\n'})
+	return err
+}
+
+type flusher interface {
+	Flush() error
+}
+
+func flush(maybeFlusher interface{}) error {
+	if f, ok := maybeFlusher.(flusher); ok {
+		return f.Flush()
+	}
+	return nil
+}
diff --git a/cmd/hcldec/examples/npm-package/example.npmhcl b/cmd/hcldec/examples/npm-package/example.npmhcl
new file mode 100644
index 0000000..445ba77
--- /dev/null
+++ b/cmd/hcldec/examples/npm-package/example.npmhcl
@@ -0,0 +1,14 @@
+name    = "hello-world"
+version = "v0.0.1"
+
+author {
+  name = "Иван Петрович Сидоров"
+}
+
+contributor {
+  name = "Juan Pérez"
+}
+
+dependencies = {
+    left-pad = "1.2.0"
+}
diff --git a/cmd/hcldec/examples/npm-package/spec.hcldec b/cmd/hcldec/examples/npm-package/spec.hcldec
new file mode 100644
index 0000000..a15c187
--- /dev/null
+++ b/cmd/hcldec/examples/npm-package/spec.hcldec
@@ -0,0 +1,136 @@
+object {
+  attr "name" {
+    type     = string
+    required = true
+  }
+  attr "version" {
+    type     = string
+    required = true
+  }
+  attr "description" {
+    type = string
+  }
+  attr "keywords" {
+    type = list(string)
+  }
+  attr "homepage" {
+    # "homepage_url" in input file is translated to "homepage" in output
+    name = "homepage_url"
+  }
+  block "bugs" {
+    object {
+      attr "url" {
+        type = string
+      }
+      attr "email" {
+        type = string
+      }
+    }
+  }
+  attr "license" {
+    type = string
+  }
+  block "author" {
+    object {
+      attr "name" {
+        type = string
+      }
+      attr "email" {
+        type = string
+      }
+      attr "url" {
+        type = string
+      }
+    }
+  }
+  block_list "contributors" {
+    block_type = "contributor"
+    object {
+      attr "name" {
+        type = string
+      }
+      attr "email" {
+        type = string
+      }
+      attr "url" {
+        type = string
+      }
+    }
+  }
+  attr "files" {
+    type = list(string)
+  }
+  attr "main" {
+    type = string
+  }
+  attr "bin" {
+    type = map(string)
+  }
+  attr "man" {
+    type = list(string)
+  }
+  attr "directories" {
+    type = map(string)
+  }
+  block "repository" {
+    object {
+      attr "type" {
+        type     = string
+        required = true
+      }
+      attr "url" {
+        type     = string
+        required = true
+      }
+    }
+  }
+  attr "scripts" {
+    type = map(string)
+  }
+  attr "config" {
+    type = map(string)
+  }
+  attr "dependencies" {
+    type = map(string)
+  }
+  attr "devDependencies" {
+    name = "dev_dependencies"
+    type = map(string)
+  }
+  attr "peerDependencies" {
+    name = "peer_dependencies"
+    type = map(string)
+  }
+  attr "bundledDependencies" {
+    name = "bundled_dependencies"
+    type = map(string)
+  }
+  attr "optionalDependencies" {
+    name = "optional_dependencies"
+    type = map(string)
+  }
+  attr "engines" {
+    type = map(string)
+  }
+  attr "os" {
+    type = list(string)
+  }
+  attr "cpu" {
+    type = list(string)
+  }
+  attr "prefer_global" {
+    type = bool
+  }
+  default "private" {
+    attr {
+      name = "private"
+      type = bool
+    }
+    literal {
+      value = false
+    }
+  }
+  attr "publishConfig" {
+    type = map(any)
+  }
+}
diff --git a/cmd/hcldec/examples/sh-config-file/example.conf b/cmd/hcldec/examples/sh-config-file/example.conf
new file mode 100644
index 0000000..c0d7705
--- /dev/null
+++ b/cmd/hcldec/examples/sh-config-file/example.conf
@@ -0,0 +1,10 @@
+name = "Juan"
+friend {
+  name = "John"
+}
+friend {
+  name = "Yann"
+}
+friend {
+  name = "Ermintrude"
+}
diff --git a/cmd/hcldec/examples/sh-config-file/example.sh b/cmd/hcldec/examples/sh-config-file/example.sh
new file mode 100755
index 0000000..95a0080
--- /dev/null
+++ b/cmd/hcldec/examples/sh-config-file/example.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+set -euo pipefail
+
+# All paths from this point on are relative to the directory containing this
+# script, for simplicity's sake.
+cd "$( dirname "${BASH_SOURCE[0]}" )"
+
+# Read the config file using hcldec and then use jq to extract values in a
+# shell-friendly form. jq will ensure that the values are properly quoted and
+# escaped for consumption by the shell.
+CONFIG_VARS="$(hcldec --spec=spec.hcldec example.conf | jq -r '@sh "NAME=\(.name) GREETING=\(.greeting) FRIENDS=(\(.friends))"')"
+if [ $? != 0 ]; then
+    # If hcldec or jq failed then it has already printed out some error messages
+    # and so we can bail out.
+    exit $?
+fi
+
+# Import our settings into our environment
+eval "$CONFIG_VARS"
+
+# ...and now, some contrived usage of the settings we loaded:
+echo "$GREETING $NAME!"
+for name in ${FRIENDS[@]}; do
+    echo "$GREETING $name, too!"
+done
diff --git a/cmd/hcldec/examples/sh-config-file/spec.hcldec b/cmd/hcldec/examples/sh-config-file/spec.hcldec
new file mode 100644
index 0000000..6b15fdc
--- /dev/null
+++ b/cmd/hcldec/examples/sh-config-file/spec.hcldec
@@ -0,0 +1,23 @@
+object {
+  attr "name" {
+    type     = string
+    required = true
+  }
+  default "greeting" {
+    attr {
+      name = "greeting"
+      type = string
+    }
+    literal {
+      value = "Hello"
+    }
+  }
+  block_list "friends" {
+    block_type = "friend"
+    attr {
+      name     = "name"
+      type     = string
+      required = true
+    }
+  }
+}
diff --git a/cmd/hcldec/main.go b/cmd/hcldec/main.go
new file mode 100644
index 0000000..5be672d
--- /dev/null
+++ b/cmd/hcldec/main.go
@@ -0,0 +1,376 @@
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"strings"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hcldec"
+	"github.com/hashicorp/hcl/v2/hclparse"
+	flag "github.com/spf13/pflag"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
+	ctyjson "github.com/zclconf/go-cty/cty/json"
+	"golang.org/x/crypto/ssh/terminal"
+)
+
+const versionStr = "0.0.1-dev"
+
+// vars is populated from --vars arguments on the command line, via a flag
+// registration in init() below.
+var vars = &varSpecs{}
+
+var (
+	specFile    = flag.StringP("spec", "s", "", "path to spec file (required)")
+	outputFile  = flag.StringP("out", "o", "", "write to the given file, instead of stdout")
+	diagsFormat = flag.StringP("diags", "", "", "format any returned diagnostics in the given format; currently only \"json\" is accepted")
+	showVarRefs = flag.BoolP("var-refs", "", false, "rather than decoding input, produce a JSON description of the variables referenced by it")
+	withType    = flag.BoolP("with-type", "", false, "include an additional object level at the top describing the HCL-oriented type of the result value")
+	showVersion = flag.BoolP("version", "v", false, "show the version number and immediately exit")
+	keepNulls   = flag.BoolP("keep-nulls", "", false, "retain object properties that have null as their value (they are removed by default)")
+)
+
+var parser = hclparse.NewParser()
+var diagWr hcl.DiagnosticWriter // initialized in init
+
+func init() {
+	flag.VarP(vars, "vars", "V", "provide variables to the given configuration file(s)")
+}
+
+func main() {
+	flag.Usage = usage
+	flag.Parse()
+
+	if *showVersion {
+		fmt.Println(versionStr)
+		os.Exit(0)
+	}
+
+	args := flag.Args()
+
+	switch *diagsFormat {
+	case "":
+		color := terminal.IsTerminal(int(os.Stderr.Fd()))
+		w, _, err := terminal.GetSize(int(os.Stdout.Fd()))
+		if err != nil {
+			w = 80
+		}
+		diagWr = hcl.NewDiagnosticTextWriter(os.Stderr, parser.Files(), uint(w), color)
+	case "json":
+		diagWr = &jsonDiagWriter{w: os.Stderr}
+	default:
+		fmt.Fprintf(os.Stderr, "Invalid diagnostics format %q: only \"json\" is supported.\n", *diagsFormat)
+		os.Exit(2)
+	}
+
+	err := realmain(args)
+
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error: %s\n\n", err.Error())
+		os.Exit(1)
+	}
+}
+
+func realmain(args []string) error {
+
+	if *specFile == "" {
+		return fmt.Errorf("the --spec=... argument is required")
+	}
+
+	var diags hcl.Diagnostics
+
+	specContent, specDiags := loadSpecFile(*specFile)
+	diags = append(diags, specDiags...)
+	if specDiags.HasErrors() {
+		diagWr.WriteDiagnostics(diags)
+		flush(diagWr)
+		os.Exit(2)
+	}
+
+	spec := specContent.RootSpec
+
+	ctx := &hcl.EvalContext{
+		Variables: map[string]cty.Value{},
+		Functions: map[string]function.Function{},
+	}
+	for name, val := range specContent.Variables {
+		ctx.Variables[name] = val
+	}
+	for name, f := range specContent.Functions {
+		ctx.Functions[name] = f
+	}
+	if len(*vars) != 0 {
+		for i, varsSpec := range *vars {
+			var vals map[string]cty.Value
+			var valsDiags hcl.Diagnostics
+			if strings.HasPrefix(strings.TrimSpace(varsSpec), "{") {
+				// literal JSON object on the command line
+				vals, valsDiags = parseVarsArg(varsSpec, i)
+			} else {
+				// path to a file containing either HCL or JSON (by file extension)
+				vals, valsDiags = parseVarsFile(varsSpec)
+			}
+			diags = append(diags, valsDiags...)
+			for k, v := range vals {
+				ctx.Variables[k] = v
+			}
+		}
+	}
+
+	// If we have empty context elements then we'll nil them out so that
+	// we'll produce e.g. "variables are not allowed" errors instead of
+	// "variable not found" errors.
+	if len(ctx.Variables) == 0 {
+		ctx.Variables = nil
+	}
+	if len(ctx.Functions) == 0 {
+		ctx.Functions = nil
+	}
+	if ctx.Variables == nil && ctx.Functions == nil {
+		ctx = nil
+	}
+
+	var bodies []hcl.Body
+
+	if len(args) == 0 {
+		src, err := ioutil.ReadAll(os.Stdin)
+		if err != nil {
+			return fmt.Errorf("failed to read stdin: %s", err)
+		}
+
+		f, fDiags := parser.ParseHCL(src, "<stdin>")
+		diags = append(diags, fDiags...)
+		if !fDiags.HasErrors() {
+			bodies = append(bodies, f.Body)
+		}
+	} else {
+		for _, filename := range args {
+			var f *hcl.File
+			var fDiags hcl.Diagnostics
+			if strings.HasSuffix(filename, ".json") {
+				f, fDiags = parser.ParseJSONFile(filename)
+			} else {
+				f, fDiags = parser.ParseHCLFile(filename)
+			}
+			diags = append(diags, fDiags...)
+			if !fDiags.HasErrors() {
+				bodies = append(bodies, f.Body)
+			}
+		}
+	}
+
+	if diags.HasErrors() {
+		diagWr.WriteDiagnostics(diags)
+		flush(diagWr)
+		os.Exit(2)
+	}
+
+	var body hcl.Body
+	switch len(bodies) {
+	case 0:
+		// should never happen, but... okay?
+		body = hcl.EmptyBody()
+	case 1:
+		body = bodies[0]
+	default:
+		body = hcl.MergeBodies(bodies)
+	}
+
+	if *showVarRefs {
+		vars := hcldec.Variables(body, spec)
+		return showVarRefsJSON(vars, ctx)
+	}
+
+	val, decDiags := hcldec.Decode(body, spec, ctx)
+	diags = append(diags, decDiags...)
+
+	if diags.HasErrors() {
+		diagWr.WriteDiagnostics(diags)
+		flush(diagWr)
+		os.Exit(2)
+	}
+
+	wantType := val.Type()
+	if *withType {
+		// We'll instead ask to encode as dynamic, which will make the
+		// marshaler include type information.
+		wantType = cty.DynamicPseudoType
+	}
+	out, err := ctyjson.Marshal(val, wantType)
+	if err != nil {
+		return err
+	}
+
+	// hcldec will include explicit nulls where an ObjectSpec has a spec
+	// that refers to a missing item, but that'll probably be annoying for
+	// a consumer of our output to deal with so we'll just strip those
+	// out and reduce to only the non-null values.
+	if !*keepNulls {
+		out = stripJSONNullProperties(out)
+	}
+
+	target := os.Stdout
+	if *outputFile != "" {
+		target, err = os.OpenFile(*outputFile, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, os.ModePerm)
+		if err != nil {
+			return fmt.Errorf("can't open %s for writing: %s", *outputFile, err)
+		}
+	}
+
+	fmt.Fprintf(target, "%s\n", out)
+
+	return nil
+}
+
+func usage() {
+	fmt.Fprintf(os.Stderr, "usage: hcldec --spec=<spec-file> [options] [hcl-file ...]\n")
+	flag.PrintDefaults()
+	os.Exit(2)
+}
+
+func showVarRefsJSON(vars []hcl.Traversal, ctx *hcl.EvalContext) error {
+	type PosJSON struct {
+		Line   int `json:"line"`
+		Column int `json:"column"`
+		Byte   int `json:"byte"`
+	}
+	type RangeJSON struct {
+		Filename string  `json:"filename"`
+		Start    PosJSON `json:"start"`
+		End      PosJSON `json:"end"`
+	}
+	type StepJSON struct {
+		Kind  string          `json:"kind"`
+		Name  string          `json:"name,omitempty"`
+		Key   json.RawMessage `json:"key,omitempty"`
+		Range RangeJSON       `json:"range"`
+	}
+	type TraversalJSON struct {
+		RootName string          `json:"root_name"`
+		Value    json.RawMessage `json:"value,omitempty"`
+		Steps    []StepJSON      `json:"steps"`
+		Range    RangeJSON       `json:"range"`
+	}
+
+	ret := make([]TraversalJSON, 0, len(vars))
+	for _, traversal := range vars {
+		tJSON := TraversalJSON{
+			Steps: make([]StepJSON, 0, len(traversal)),
+		}
+
+		for _, step := range traversal {
+			var sJSON StepJSON
+			rng := step.SourceRange()
+			sJSON.Range.Filename = rng.Filename
+			sJSON.Range.Start.Line = rng.Start.Line
+			sJSON.Range.Start.Column = rng.Start.Column
+			sJSON.Range.Start.Byte = rng.Start.Byte
+			sJSON.Range.End.Line = rng.End.Line
+			sJSON.Range.End.Column = rng.End.Column
+			sJSON.Range.End.Byte = rng.End.Byte
+			switch ts := step.(type) {
+			case hcl.TraverseRoot:
+				sJSON.Kind = "root"
+				sJSON.Name = ts.Name
+				tJSON.RootName = ts.Name
+			case hcl.TraverseAttr:
+				sJSON.Kind = "attr"
+				sJSON.Name = ts.Name
+			case hcl.TraverseIndex:
+				sJSON.Kind = "index"
+				src, err := ctyjson.Marshal(ts.Key, ts.Key.Type())
+				if err == nil {
+					sJSON.Key = json.RawMessage(src)
+				}
+			default:
+				// Should never get here, since the above should be exhaustive
+				// for all possible traversal step types.
+				sJSON.Kind = "(unknown)"
+			}
+			tJSON.Steps = append(tJSON.Steps, sJSON)
+		}
+
+		// Best effort, we'll try to include the current known value of this
+		// traversal, if any.
+		val, diags := traversal.TraverseAbs(ctx)
+		if !diags.HasErrors() {
+			enc, err := ctyjson.Marshal(val, val.Type())
+			if err == nil {
+				tJSON.Value = json.RawMessage(enc)
+			}
+		}
+
+		rng := traversal.SourceRange()
+		tJSON.Range.Filename = rng.Filename
+		tJSON.Range.Start.Line = rng.Start.Line
+		tJSON.Range.Start.Column = rng.Start.Column
+		tJSON.Range.Start.Byte = rng.Start.Byte
+		tJSON.Range.End.Line = rng.End.Line
+		tJSON.Range.End.Column = rng.End.Column
+		tJSON.Range.End.Byte = rng.End.Byte
+
+		ret = append(ret, tJSON)
+	}
+
+	out, err := json.MarshalIndent(ret, "", "  ")
+	if err != nil {
+		return fmt.Errorf("failed to marshal variable references as JSON: %s", err)
+	}
+
+	target := os.Stdout
+	if *outputFile != "" {
+		target, err = os.OpenFile(*outputFile, os.O_TRUNC|os.O_CREATE|os.O_WRONLY, os.ModePerm)
+		if err != nil {
+			return fmt.Errorf("can't open %s for writing: %s", *outputFile, err)
+		}
+	}
+
+	fmt.Fprintf(target, "%s\n", out)
+
+	return nil
+}
+
+func stripJSONNullProperties(src []byte) []byte {
+	dec := json.NewDecoder(bytes.NewReader(src))
+	dec.UseNumber()
+
+	var v interface{}
+	err := dec.Decode(&v)
+	if err != nil {
+		// We expect valid JSON
+		panic(err)
+	}
+
+	v = stripNullMapElements(v)
+
+	new, err := json.Marshal(v)
+	if err != nil {
+		panic(err)
+	}
+	return new
+}
+
+func stripNullMapElements(v interface{}) interface{} {
+	switch tv := v.(type) {
+	case map[string]interface{}:
+		for k, ev := range tv {
+			if ev == nil {
+				delete(tv, k)
+			} else {
+				tv[k] = stripNullMapElements(ev)
+			}
+		}
+		return v
+	case []interface{}:
+		for i, ev := range tv {
+			tv[i] = stripNullMapElements(ev)
+		}
+		return v
+	default:
+		return v
+	}
+}
diff --git a/cmd/hcldec/spec-format.md b/cmd/hcldec/spec-format.md
new file mode 100644
index 0000000..60dbba4
--- /dev/null
+++ b/cmd/hcldec/spec-format.md
@@ -0,0 +1,487 @@
+# `hcldec` spec format
+
+The `hcldec` spec format instructs [`hcldec`](README.md) on how to validate
+one or more configuration files given in the HCL syntax and how to translate
+the result into JSON format.
+
+The spec format is itself built from HCL syntax, with each HCL block serving
+as a _spec_ whose block type and contents together describe a single mapping
+action and, in most cases, a validation constraint. Each spec block produces
+one JSON value.
+
+A spec _file_ must have a single top-level spec block that describes the
+top-level JSON value `hcldec` will return, and that spec block may have other
+nested spec blocks (depending on its type) that produce nested structures and
+additional validation constraints.
+
+The most common usage of `hcldec` is to produce a JSON object whose properties
+are derived from the top-level content of the input file. In this case, the
+root of the given spec file will have an `object` spec block whose contents
+describe how each of the object's properties are to be populated using
+nested spec blocks.
+
+Each spec is evaluated in the context of an HCL _body_, which is the HCL
+terminology for one level of nesting in a configuration file. The top-level
+objects in a file all belong to the root body of that file, and then each
+nested block has its own body containing the elements within that block.
+Some spec types select a new body as the context for their nested specs,
+allowing nested HCL structures to be decoded.
+
+## Spec Block Types
+
+The following sections describe the different block types that can be used to
+define specs within a spec file.
+
+### `object` spec blocks
+
+The `object` spec type is the most commonly used at the root of a spec file.
+Its result is a JSON object whose properties are set based on any nested
+spec blocks:
+
+```hcl
+object {
+  attr "name" {
+    type = string
+  }
+  block "address" {
+    object {
+      attr "street" {
+        type = string
+      }
+      # ...
+    }
+  }
+}
+```
+
+Nested spec blocks inside `object` must always have an extra block label
+(`"name"`, `"address"`, and `"street"` in the above example) that specifies
+the name of the property that should be created in the JSON object result.
+This label also acts as a default name selector for the nested spec, allowing
+the `attr` blocks in the above example to omit the usually-required `name`
+argument in cases where the HCL input name and JSON output name are the same.
+
+An `object` spec block creates no validation constraints, but it passes on
+any validation constraints created by the nested specs.
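+
+For illustration (the names here are hypothetical), an input file matching the
+spec above might look like this:
+
+```hcl
+name = "Ermintrude"
+
+address {
+  street = "123 Elm Street"
+}
+```
+
+Decoding that input with the spec would produce a JSON object along the lines
+of `{"name": "Ermintrude", "address": {"street": "123 Elm Street"}}`.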
+
+### `array` spec blocks
+
+The `array` spec type produces a JSON array whose elements are set based on
+any nested spec blocks:
+
+```hcl
+array {
+  attr {
+    name = "first_element"
+    type = string
+  }
+  attr {
+    name = "second_element"
+    type = string
+  }
+}
+```
+
+An `array` spec block creates no validation constraints, but it passes on
+any validation constraints created by the nested specs.
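+
+For example, an input containing `first_element = "a"` and
+`second_element = "b"` would decode to `["a", "b"]` with the spec above.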
+
+### `attr` spec blocks
+
+The `attr` spec type reads the value of an attribute in the current body
+and returns that value as its result. It also creates validation constraints
+for the given attribute name and its value.
+
+```hcl
+attr {
+  name     = "document_root"
+  type     = string
+  required = true
+}
+```
+
+`attr` spec blocks accept the following arguments:
+
+* `name` (required) - The attribute name to expect within the HCL input file.
+  This may be omitted when a default name selector is created by a parent
+  `object` spec, if the input attribute name should match the output JSON
+  object property name.
+
+* `type` (optional) - A [type expression](#type-expressions) that the given
+  attribute value must conform to. If this argument is set, `hcldec` will
+  automatically convert the given input value to this type or produce an
+  error if that is not possible.
+
+* `required` (optional) - If set to `true`, `hcldec` will produce an error
+  if a value is not provided for the source attribute.
+
+`attr` is a leaf spec type, so no nested spec blocks are permitted.
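+
+For example, the spec above would match an input attribute such as the
+following (the value is hypothetical):
+
+```hcl
+document_root = "/var/www/html"
+```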
+
+### `block` spec blocks
+
+The `block` spec type applies one nested spec block to the contents of a
+block within the current body and returns the result of that spec. It also
+creates validation constraints for the given block type name.
+
+```hcl
+block {
+  block_type = "logging"
+
+  object {
+    attr "level" {
+      type = string
+    }
+    attr "file" {
+      type = string
+    }
+  }
+}
+```
+
+`block` spec blocks accept the following arguments:
+
+* `block_type` (required) - The block type name to expect within the HCL
+  input file. This may be omitted when a default name selector is created
+  by a parent `object` spec, if the input block type name should match the
+  output JSON object property name.
+
+* `required` (optional) - If set to `true`, `hcldec` will produce an error
+  if a block of the specified type is not present in the current body.
+
+`block` creates a validation constraint that there must be zero or one blocks
+of the given type name, or exactly one if `required` is set.
+
+`block` expects a single nested spec block, which is applied to the body of
+the block of the given type when it is present.
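+
+For example, the spec above would match an input block like this (the values
+are hypothetical):
+
+```hcl
+logging {
+  level = "info"
+  file  = "/var/log/app.log"
+}
+```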
+
+### `block_list` spec blocks
+
+The `block_list` spec type is similar to `block`, but it accepts zero or
+more blocks of a specified type rather than requiring zero or one. The
+result is a JSON array with one entry per block of the given type.
+
+```hcl
+block_list {
+  block_type = "log_file"
+
+  object {
+    attr "level" {
+      type = string
+    }
+    attr "filename" {
+      type     = string
+      required = true
+    }
+  }
+}
+```
+
+`block_list` spec blocks accept the following arguments:
+
+* `block_type` (required) - The block type name to expect within the HCL
+  input file. This may be omitted when a default name selector is created
+  by a parent `object` spec, if the input block type name should match the
+  output JSON object property name.
+
+* `min_items` (optional) - If set to a number greater than zero, `hcldec` will
+  produce an error if fewer than the given number of blocks are present.
+
+* `max_items` (optional) - If set to a number greater than zero, `hcldec` will
+  produce an error if more than the given number of blocks are present. This
+  attribute must be greater than or equal to `min_items` if both are set.
+
+`block_list` creates a validation constraint on the number of blocks of the
+given type that may be present.
+
+`block_list` expects a single nested spec block, which is applied to the body
+of each matching block to produce the resulting list items.
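+
+For illustration (the filenames are hypothetical), the spec above would accept
+an input like the following and produce a JSON array with one object per
+`log_file` block:
+
+```hcl
+log_file {
+  filename = "/var/log/app.log"
+  level    = "debug"
+}
+
+log_file {
+  filename = "/var/log/audit.log"
+}
+```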
+
+### `block_set` spec blocks
+
+The `block_set` spec type behaves the same as `block_list` except that
+the result is in no specific order and any duplicate items are removed.
+
+```hcl
+block_set {
+  block_type = "log_file"
+
+  object {
+    attr "level" {
+      type = string
+    }
+    attr "filename" {
+      type     = string
+      required = true
+    }
+  }
+}
+```
+
+The contents of `block_set` are the same as for `block_list`.
+
+### `block_map` spec blocks
+
+The `block_map` spec type is similar to `block`, but it accepts zero or
+more blocks of a specified type rather than requiring zero or one. The
+result is a JSON object, or possibly multiple nested JSON objects, whose
+properties are derived from the labels set on each matching block.
+
+```hcl
+block_map {
+  block_type = "log_file"
+  labels = ["filename"]
+
+  object {
+    attr "level" {
+      type     = string
+      required = true
+    }
+  }
+}
+```
+
+`block_map` spec blocks accept the following arguments:
+
+* `block_type` (required) - The block type name to expect within the HCL
+  input file. This may be omitted when a default name selector is created
+  by a parent `object` spec, if the input block type name should match the
+  output JSON object property name.
+
+* `labels` (required) - A list of user-oriented block label names. Each entry
+  in this list creates one level of object within the output value, and
+  requires one additional block header label on any child block of this type.
+  Block header labels are the quoted strings that appear after the block type
+  name but before the opening `{`.
+
+`block_map` creates a validation constraint on the number of labels that
+blocks of the given type must have.
+
+`block_map` expects a single nested spec block, which is applied to the body
+of each matching block to produce the resulting map items.
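+
+For illustration (the labels are hypothetical), the spec above maps each
+`log_file` block's label to a property of the resulting JSON object:
+
+```hcl
+log_file "stderr" {
+  level = "debug"
+}
+
+log_file "audit" {
+  level = "info"
+}
+```
+
+This input would decode to something like
+`{"stderr": {"level": "debug"}, "audit": {"level": "info"}}`.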
+
+### `block_attrs` spec blocks
+
+The `block_attrs` spec type is similar to an `attr` spec block of a map type,
+but it produces a map from the attributes of a block rather than from an
+attribute's expression.
+
+```hcl
+block_attrs {
+  block_type   = "variables"
+  element_type = string
+  required     = false
+}
+```
+
+This allows a map with user-defined keys to be produced within block syntax,
+but a constraint of that syntax is that the user cannot dynamically generate
+either individual key names (using key expressions) or the entire map value
+(using a `for` expression).
+
+`block_attrs` spec blocks accept the following arguments:
+
+* `block_type` (required) - The block type name to expect within the HCL
+  input file. This may be omitted when a default name selector is created
+  by a parent `object` spec, if the input block type name should match the
+  output JSON object property name.
+
+* `element_type` (required) - The value type to require for each of the
+  attributes within a matched block. The resulting value will be a JSON
+  object whose property values are of this type.
+
+* `required` (optional) - If `true`, an error will be produced if a block
+  of the given type is not present. If `false` -- the default -- an absent
+  block will be indicated by producing `null`.
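+
+For example, the spec above would accept an input block such as the following
+(the keys are hypothetical):
+
+```hcl
+variables {
+  region = "us-west-1"
+  flavor = "large"
+}
+```
+
+which would decode to `{"region": "us-west-1", "flavor": "large"}`.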
+
+### `literal` spec blocks
+
+The `literal` spec type returns a given literal value, and creates no
+validation constraints. It is most commonly used with the `default` spec
+type to create a fallback value, but can also be used e.g. to fill out
+required properties in an `object` spec that do not correspond to any
+construct in the input configuration.
+
+```hcl
+literal {
+  value = "hello world"
+}
+```
+
+`literal` spec blocks accept the following argument:
+
+* `value` (required) - The value to return. This attribute may be an expression
+  that uses [functions](#spec-definition-functions).
+
+`literal` is a leaf spec type, so no nested spec blocks are permitted.
+
+### `default` spec blocks
+
+The `default` spec type evaluates a sequence of nested specs in turn and
+returns the result of the first one that produces a non-null value.
+It creates no validation constraints of its own, but passes on the validation
+constraints from its first nested block.
+
+```hcl
+default {
+  attr {
+    name = "private"
+    type = bool
+  }
+  literal {
+    value = false
+  }
+}
+```
+
+A `default` spec block must have at least one nested spec block, and should
+generally have at least two since otherwise the `default` wrapper is a no-op.
+
+The second and any subsequent spec blocks are _fallback_ specs. These exhibit
+their usual behavior but are not able to impose validation constraints on the
+current body since they are not evaluated unless all prior specs produce
+`null` as their result.
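+
+For example, with the spec above an input file that omits `private` entirely
+would decode to `false` (from the `literal` fallback), while an input
+containing the following would decode to `true`:
+
+```hcl
+private = true
+```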
+
+### `transform` spec blocks
+
+The `transform` spec type evaluates one nested spec and then evaluates a given
+expression with that nested spec result to produce a final value.
+It creates no validation constraints of its own, but passes on the validation
+constraints from its nested block.
+
+```hcl
+transform {
+  attr {
+    name = "size_in_mb"
+    type = number
+  }
+
+  # Convert result to a size in bytes
+  result = nested * 1024 * 1024
+}
+```
+
+`transform` spec blocks accept the following argument:
+
+* `result` (required) - The expression to evaluate on the result of the nested
+  spec. The variable `nested` is defined when evaluating this expression, with
+  the result value of the nested spec.
+
+The `result` expression may use [functions](#spec-definition-functions).
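+
+For example, with the spec above an input containing `size_in_mb = 2` would
+decode to `2097152` (that is, 2 * 1024 * 1024).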
+
+## Predefined Variables
+
+`hcldec` accepts values for variables to expose into the input file's
+expression scope as CLI options, and this is the most common way to pass
+values since it allows them to be dynamically populated by the calling
+application.
+
+However, it's also possible to pre-define variables with constant values
+within a spec file, using the top-level `variables` block type:
+
+```hcl
+variables {
+  name = "Stephen"
+}
+```
+
+Variables defined via the `hcldec` command line will override predefined
+variables of the same name, so this mechanism can also be used to provide
+defaults for variables that are overridden only in certain contexts.
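+
+For example, with the `variables` block above, an input file could then refer
+to `name` in its expressions (this input is hypothetical):
+
+```hcl
+greeting = "Hello, ${name}!"
+```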
+
+## Custom Functions
+
+The spec can make arbitrary HCL functions available in the input file's
+expression scope, and thus allow simple computation within the input file,
+in addition to HCL's built-in operators.
+
+Custom functions are defined in the spec file with the top-level `function`
+block type:
+
+```hcl
+function "add_one" {
+  params = [n]
+  result = n + 1
+}
+```
+
+Functions behave in a similar way to the `transform` spec type: the given
+`result` expression is evaluated with additional variables whose names match
+the declared `params`.
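+
+For example, the `add_one` function above could be called from an input file
+like this (the attribute name is hypothetical):
+
+```hcl
+retries = add_one(2) # evaluates to 3
+```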
+
+The [spec definition functions](#spec-definition-functions) can be used within
+custom function expressions, allowing them to be optionally exposed into the
+input file:
+
+```hcl
+function "upper" {
+  params = [str]
+  result = upper(str)
+}
+
+function "min" {
+  params         = []
+  variadic_param = nums
+  result         = min(nums...)
+}
+```
+
+Custom functions defined in the spec cannot be called from the spec itself.
+
+## Spec Definition Functions
+
+Certain expressions within a specification may use the following functions.
+The documentation for each spec type above specifies where functions may
+be used.
+
+* `abs(number)` returns the absolute (positive) value of the given number.
+* `coalesce(vals...)` returns the first non-null value given.
+* `concat(lists...)` concatenates together all of the given lists to produce a new list.
+* `hasindex(val, idx)` returns true if the expression `val[idx]` could succeed.
+* `int(number)` returns the integer portion of the given number, rounding towards zero.
+* `jsondecode(str)` interprets the given string as JSON and returns the resulting data structure.
+* `jsonencode(val)` returns a JSON-serialized version of the given value.
+* `length(collection)` returns the number of elements in the given collection (list, set, map, object, or tuple).
+* `lower(string)` returns the given string with all uppercase letters converted to lowercase.
+* `max(numbers...)` returns the greatest of the given numbers.
+* `min(numbers...)` returns the smallest of the given numbers.
+* `reverse(string)` returns the given string with all of the characters in reverse order.
+* `strlen(string)` returns the number of characters in the given string.
+* `substr(string, offset, length)` returns the requested substring of the given string.
+* `upper(string)` returns the given string with all lowercase letters converted to uppercase.
+
+Note that these functions are available only in the context of the _spec_
+file, not the _input_ file. Functions can be exposed into the input file using
+[Custom Functions](#custom-functions) within the spec, which may in turn
+refer to these spec definition functions.
+
+## Type Expressions
+
+Type expressions are used to describe the expected type of an attribute, as
+an additional validation constraint.
+
+A type expression uses primitive type names and compound type constructors.
+A type constructor builds a new type based on one or more type expression
+arguments.
+
+The following type names and type constructors are supported:
+
+* `any` is a wildcard that accepts a value of any type. (In HCL terms, this
+  is the _dynamic pseudo-type_.)
+* `string` is a Unicode string.
+* `number` is an arbitrary-precision floating point number.
+* `bool` is a boolean value (`true` or `false`).
+* `list(element_type)` constructs a list type with the given element type.
+* `set(element_type)` constructs a set type with the given element type.
+* `map(element_type)` constructs a map type with the given element type.
+* `object({name1 = element_type, name2 = element_type, ...})` constructs
+  an object type with the given attribute types.
+* `tuple([element_type, element_type, ...])` constructs a tuple type with
+  the given element types. This can be used, for example, to require an
+  array with a particular number of elements, or with elements of different
+  types.
+
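+For illustration, a compound type constraint can be built by nesting
+constructors, as in this hypothetical `attr` spec:
+
+```hcl
+attr "listeners" {
+  type = list(object({
+    address = string
+    port    = number
+  }))
+}
+```
+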
+The above types are as defined by
+[the HCL syntax-agnostic information model](../../spec.md). After
+validation, values are lowered to JSON's type system, which is a subset
+of the HCL type system.
+
+`null` is a valid value of any type, and not a type itself.
diff --git a/cmd/hcldec/spec.go b/cmd/hcldec/spec.go
new file mode 100644
index 0000000..b592cf9
--- /dev/null
+++ b/cmd/hcldec/spec.go
@@ -0,0 +1,645 @@
+package main
+
+import (
+	"fmt"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/ext/userfunc"
+	"github.com/hashicorp/hcl/v2/gohcl"
+	"github.com/hashicorp/hcl/v2/hcldec"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
+)
+
+type specFileContent struct {
+	Variables map[string]cty.Value
+	Functions map[string]function.Function
+	RootSpec  hcldec.Spec
+}
+
+var specCtx = &hcl.EvalContext{
+	Functions: specFuncs,
+}
+
+func loadSpecFile(filename string) (specFileContent, hcl.Diagnostics) {
+	file, diags := parser.ParseHCLFile(filename)
+	if diags.HasErrors() {
+		return specFileContent{RootSpec: errSpec}, diags
+	}
+
+	vars, funcs, specBody, declDiags := decodeSpecDecls(file.Body)
+	diags = append(diags, declDiags...)
+
+	spec, specDiags := decodeSpecRoot(specBody)
+	diags = append(diags, specDiags...)
+
+	return specFileContent{
+		Variables: vars,
+		Functions: funcs,
+		RootSpec:  spec,
+	}, diags
+}
+
+func decodeSpecDecls(body hcl.Body) (map[string]cty.Value, map[string]function.Function, hcl.Body, hcl.Diagnostics) {
+	funcs, body, diags := userfunc.DecodeUserFunctions(body, "function", func() *hcl.EvalContext {
+		return specCtx
+	})
+
+	content, body, moreDiags := body.PartialContent(&hcl.BodySchema{
+		Blocks: []hcl.BlockHeaderSchema{
+			{
+				Type: "variables",
+			},
+		},
+	})
+	diags = append(diags, moreDiags...)
+
+	vars := make(map[string]cty.Value)
+	for _, block := range content.Blocks {
+		// We only have one block type in our schema, so we can assume all
+		// blocks are of that type.
+		attrs, moreDiags := block.Body.JustAttributes()
+		diags = append(diags, moreDiags...)
+
+		for name, attr := range attrs {
+			val, moreDiags := attr.Expr.Value(specCtx)
+			diags = append(diags, moreDiags...)
+			vars[name] = val
+		}
+	}
+
+	return vars, funcs, body, diags
+}
+
+func decodeSpecRoot(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
+	content, diags := body.Content(specSchemaUnlabelled)
+
+	if len(content.Blocks) == 0 {
+		if diags.HasErrors() {
+			// If we already have errors then they probably explain
+			// why we have no blocks, so we'll skip our additional
+			// error message added below.
+			return errSpec, diags
+		}
+
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing spec block",
+			Detail:   "A spec file must have exactly one root block specifying how to map to a JSON value.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+
+	if len(content.Blocks) > 1 {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Extraneous spec block",
+			Detail:   "A spec file must have exactly one root block specifying how to map to a JSON value.",
+			Subject:  &content.Blocks[1].DefRange,
+		})
+		return errSpec, diags
+	}
+
+	spec, specDiags := decodeSpecBlock(content.Blocks[0])
+	diags = append(diags, specDiags...)
+	return spec, diags
+}
+
+func decodeSpecBlock(block *hcl.Block) (hcldec.Spec, hcl.Diagnostics) {
+	var impliedName string
+	if len(block.Labels) > 0 {
+		impliedName = block.Labels[0]
+	}
+
+	switch block.Type {
+
+	case "object":
+		return decodeObjectSpec(block.Body)
+
+	case "array":
+		return decodeArraySpec(block.Body)
+
+	case "attr":
+		return decodeAttrSpec(block.Body, impliedName)
+
+	case "block":
+		return decodeBlockSpec(block.Body, impliedName)
+
+	case "block_list":
+		return decodeBlockListSpec(block.Body, impliedName)
+
+	case "block_set":
+		return decodeBlockSetSpec(block.Body, impliedName)
+
+	case "block_map":
+		return decodeBlockMapSpec(block.Body, impliedName)
+
+	case "block_attrs":
+		return decodeBlockAttrsSpec(block.Body, impliedName)
+
+	case "default":
+		return decodeDefaultSpec(block.Body)
+
+	case "transform":
+		return decodeTransformSpec(block.Body)
+
+	case "literal":
+		return decodeLiteralSpec(block.Body)
+
+	default:
+		// Should never happen, because the above cases should be exhaustive
+		// for our schema.
+		var diags hcl.Diagnostics
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Invalid spec block",
+			Detail:   fmt.Sprintf("Blocks of type %q are not expected here.", block.Type),
+			Subject:  &block.TypeRange,
+		})
+		return errSpec, diags
+	}
+}
+
+func decodeObjectSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
+	content, diags := body.Content(specSchemaLabelled)
+
+	spec := make(hcldec.ObjectSpec)
+	for _, block := range content.Blocks {
+		propSpec, propDiags := decodeSpecBlock(block)
+		diags = append(diags, propDiags...)
+		spec[block.Labels[0]] = propSpec
+	}
+
+	return spec, diags
+}
+
+func decodeArraySpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
+	content, diags := body.Content(specSchemaUnlabelled)
+
+	spec := make(hcldec.TupleSpec, 0, len(content.Blocks))
+	for _, block := range content.Blocks {
+		elemSpec, elemDiags := decodeSpecBlock(block)
+		diags = append(diags, elemDiags...)
+		spec = append(spec, elemSpec)
+	}
+
+	return spec, diags
+}
+
+func decodeAttrSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) {
+	type content struct {
+		Name     *string        `hcl:"name"`
+		Type     hcl.Expression `hcl:"type"`
+		Required *bool          `hcl:"required"`
+	}
+
+	var args content
+	diags := gohcl.DecodeBody(body, nil, &args)
+	if diags.HasErrors() {
+		return errSpec, diags
+	}
+
+	spec := &hcldec.AttrSpec{
+		Name: impliedName,
+	}
+
+	if args.Required != nil {
+		spec.Required = *args.Required
+	}
+	if args.Name != nil {
+		spec.Name = *args.Name
+	}
+
+	var typeDiags hcl.Diagnostics
+	spec.Type, typeDiags = evalTypeExpr(args.Type)
+	diags = append(diags, typeDiags...)
+
+	if spec.Name == "" {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing name in attribute spec",
+			Detail:   "The name attribute is required, to specify the attribute name that is expected in an input HCL file.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+
+	return spec, diags
+}
+
+func decodeBlockSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) {
+	type content struct {
+		TypeName *string  `hcl:"block_type"`
+		Required *bool    `hcl:"required"`
+		Nested   hcl.Body `hcl:",remain"`
+	}
+
+	var args content
+	diags := gohcl.DecodeBody(body, nil, &args)
+	if diags.HasErrors() {
+		return errSpec, diags
+	}
+
+	spec := &hcldec.BlockSpec{
+		TypeName: impliedName,
+	}
+
+	if args.Required != nil {
+		spec.Required = *args.Required
+	}
+	if args.TypeName != nil {
+		spec.TypeName = *args.TypeName
+	}
+
+	nested, nestedDiags := decodeBlockNestedSpec(args.Nested)
+	diags = append(diags, nestedDiags...)
+	spec.Nested = nested
+
+	return spec, diags
+}
+
+func decodeBlockListSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) {
+	type content struct {
+		TypeName *string  `hcl:"block_type"`
+		MinItems *int     `hcl:"min_items"`
+		MaxItems *int     `hcl:"max_items"`
+		Nested   hcl.Body `hcl:",remain"`
+	}
+
+	var args content
+	diags := gohcl.DecodeBody(body, nil, &args)
+	if diags.HasErrors() {
+		return errSpec, diags
+	}
+
+	spec := &hcldec.BlockListSpec{
+		TypeName: impliedName,
+	}
+
+	if args.MinItems != nil {
+		spec.MinItems = *args.MinItems
+	}
+	if args.MaxItems != nil {
+		spec.MaxItems = *args.MaxItems
+	}
+	if args.TypeName != nil {
+		spec.TypeName = *args.TypeName
+	}
+
+	nested, nestedDiags := decodeBlockNestedSpec(args.Nested)
+	diags = append(diags, nestedDiags...)
+	spec.Nested = nested
+
+	if spec.TypeName == "" {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing block_type in block_list spec",
+			Detail:   "The block_type attribute is required, to specify the block type name that is expected in an input HCL file.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+
+	return spec, diags
+}
+
+func decodeBlockSetSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) {
+	type content struct {
+		TypeName *string  `hcl:"block_type"`
+		MinItems *int     `hcl:"min_items"`
+		MaxItems *int     `hcl:"max_items"`
+		Nested   hcl.Body `hcl:",remain"`
+	}
+
+	var args content
+	diags := gohcl.DecodeBody(body, nil, &args)
+	if diags.HasErrors() {
+		return errSpec, diags
+	}
+
+	spec := &hcldec.BlockSetSpec{
+		TypeName: impliedName,
+	}
+
+	if args.MinItems != nil {
+		spec.MinItems = *args.MinItems
+	}
+	if args.MaxItems != nil {
+		spec.MaxItems = *args.MaxItems
+	}
+	if args.TypeName != nil {
+		spec.TypeName = *args.TypeName
+	}
+
+	nested, nestedDiags := decodeBlockNestedSpec(args.Nested)
+	diags = append(diags, nestedDiags...)
+	spec.Nested = nested
+
+	if spec.TypeName == "" {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing block_type in block_set spec",
+			Detail:   "The block_type attribute is required, to specify the block type name that is expected in an input HCL file.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+
+	return spec, diags
+}
+
+func decodeBlockMapSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) {
+	type content struct {
+		TypeName *string  `hcl:"block_type"`
+		Labels   []string `hcl:"labels"`
+		Nested   hcl.Body `hcl:",remain"`
+	}
+
+	var args content
+	diags := gohcl.DecodeBody(body, nil, &args)
+	if diags.HasErrors() {
+		return errSpec, diags
+	}
+
+	spec := &hcldec.BlockMapSpec{
+		TypeName: impliedName,
+	}
+
+	if args.TypeName != nil {
+		spec.TypeName = *args.TypeName
+	}
+	spec.LabelNames = args.Labels
+
+	nested, nestedDiags := decodeBlockNestedSpec(args.Nested)
+	diags = append(diags, nestedDiags...)
+	spec.Nested = nested
+
+	if spec.TypeName == "" {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing block_type in block_map spec",
+			Detail:   "The block_type attribute is required, to specify the block type name that is expected in an input HCL file.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+	if len(spec.LabelNames) < 1 {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Invalid block label name list",
+			Detail:   "A block_map must have at least one label specified.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+
+	if hcldec.ImpliedType(spec).HasDynamicTypes() {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Invalid block_map spec",
+			Detail:   "A block_map spec may not contain attributes with type 'any'.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+	}
+
+	return spec, diags
+}
+
+func decodeBlockNestedSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
+	content, diags := body.Content(specSchemaUnlabelled)
+
+	if len(content.Blocks) == 0 {
+		if diags.HasErrors() {
+			// If we already have errors then they probably explain
+			// why we have no blocks, so we'll skip our additional
+			// error message added below.
+			return errSpec, diags
+		}
+
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing spec block",
+			Detail:   "A block spec must have exactly one child spec specifying how to decode block contents.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+
+	if len(content.Blocks) > 1 {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Extraneous spec block",
+			Detail:   "A block spec must have exactly one child spec specifying how to decode block contents.",
+			Subject:  &content.Blocks[1].DefRange,
+		})
+		return errSpec, diags
+	}
+
+	spec, specDiags := decodeSpecBlock(content.Blocks[0])
+	diags = append(diags, specDiags...)
+	return spec, diags
+}
+
+func decodeBlockAttrsSpec(body hcl.Body, impliedName string) (hcldec.Spec, hcl.Diagnostics) {
+	type content struct {
+		TypeName    *string        `hcl:"block_type"`
+		ElementType hcl.Expression `hcl:"element_type"`
+		Required    *bool          `hcl:"required"`
+	}
+
+	var args content
+	diags := gohcl.DecodeBody(body, nil, &args)
+	if diags.HasErrors() {
+		return errSpec, diags
+	}
+
+	spec := &hcldec.BlockAttrsSpec{
+		TypeName: impliedName,
+	}
+
+	if args.Required != nil {
+		spec.Required = *args.Required
+	}
+	if args.TypeName != nil {
+		spec.TypeName = *args.TypeName
+	}
+
+	var typeDiags hcl.Diagnostics
+	spec.ElementType, typeDiags = evalTypeExpr(args.ElementType)
+	diags = append(diags, typeDiags...)
+
+	if spec.TypeName == "" {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing block_type in block_attrs spec",
+			Detail:   "The block_type attribute is required, to specify the block type name that is expected in an input HCL file.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+
+	return spec, diags
+}
+
+func decodeLiteralSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
+	type content struct {
+		Value cty.Value `hcl:"value"`
+	}
+
+	var args content
+	diags := gohcl.DecodeBody(body, specCtx, &args)
+	if diags.HasErrors() {
+		return errSpec, diags
+	}
+
+	return &hcldec.LiteralSpec{
+		Value: args.Value,
+	}, diags
+}
+
+func decodeDefaultSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
+	content, diags := body.Content(specSchemaUnlabelled)
+
+	if len(content.Blocks) == 0 {
+		if diags.HasErrors() {
+			// If we already have errors then they probably explain
+			// why we have no blocks, so we'll skip our additional
+			// error message added below.
+			return errSpec, diags
+		}
+
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing spec block",
+			Detail:   "A default block must have at least one nested spec, each specifying a possible outcome.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+
+	if len(content.Blocks) == 1 && !diags.HasErrors() {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagWarning,
+			Summary:  "Useless default block",
+			Detail:   "A default block with only one spec is equivalent to using that spec alone.",
+			// There is exactly one block in this case, so index 0 is the one to report.
+			Subject:  &content.Blocks[0].DefRange,
+		})
+	}
+
+	var spec hcldec.Spec
+	for _, block := range content.Blocks {
+		candidateSpec, candidateDiags := decodeSpecBlock(block)
+		diags = append(diags, candidateDiags...)
+		if candidateDiags.HasErrors() {
+			continue
+		}
+
+		if spec == nil {
+			spec = candidateSpec
+		} else {
+			spec = &hcldec.DefaultSpec{
+				Primary: spec,
+				Default: candidateSpec,
+			}
+		}
+	}
+
+	return spec, diags
+}
+
+func decodeTransformSpec(body hcl.Body) (hcldec.Spec, hcl.Diagnostics) {
+	type content struct {
+		Result hcl.Expression `hcl:"result"`
+		Nested hcl.Body       `hcl:",remain"`
+	}
+
+	var args content
+	diags := gohcl.DecodeBody(body, nil, &args)
+	if diags.HasErrors() {
+		return errSpec, diags
+	}
+
+	spec := &hcldec.TransformExprSpec{
+		Expr:         args.Result,
+		VarName:      "nested",
+		TransformCtx: specCtx,
+	}
+
+	nestedContent, nestedDiags := args.Nested.Content(specSchemaUnlabelled)
+	diags = append(diags, nestedDiags...)
+
+	if len(nestedContent.Blocks) != 1 {
+		if nestedDiags.HasErrors() {
+			// If we already have errors then they probably explain
+			// why we have the wrong number of blocks, so we'll skip our
+			// additional error message added below.
+			return errSpec, diags
+		}
+
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Invalid transform spec",
+			Detail:   "A transform spec block must have exactly one nested spec block.",
+			Subject:  body.MissingItemRange().Ptr(),
+		})
+		return errSpec, diags
+	}
+
+	nestedSpec, nestedDiags := decodeSpecBlock(nestedContent.Blocks[0])
+	diags = append(diags, nestedDiags...)
+	spec.Wrapped = nestedSpec
+
+	return spec, diags
+}
+
+var errSpec = &hcldec.LiteralSpec{
+	Value: cty.NullVal(cty.DynamicPseudoType),
+}
+
+var specBlockTypes = []string{
+	"object",
+	"array",
+
+	"literal",
+
+	"attr",
+
+	"block",
+	"block_list",
+	"block_map",
+	"block_set",
+
+	"default",
+	"transform",
+}
+
+var specSchemaUnlabelled *hcl.BodySchema
+var specSchemaLabelled *hcl.BodySchema
+
+var specSchemaLabelledLabels = []string{"key"}
+
+func init() {
+	specSchemaLabelled = &hcl.BodySchema{
+		Blocks: make([]hcl.BlockHeaderSchema, 0, len(specBlockTypes)),
+	}
+	specSchemaUnlabelled = &hcl.BodySchema{
+		Blocks: make([]hcl.BlockHeaderSchema, 0, len(specBlockTypes)),
+	}
+
+	for _, name := range specBlockTypes {
+		specSchemaLabelled.Blocks = append(
+			specSchemaLabelled.Blocks,
+			hcl.BlockHeaderSchema{
+				Type:       name,
+				LabelNames: specSchemaLabelledLabels,
+			},
+		)
+		specSchemaUnlabelled.Blocks = append(
+			specSchemaUnlabelled.Blocks,
+			hcl.BlockHeaderSchema{
+				Type: name,
+			},
+		)
+	}
+}
diff --git a/cmd/hcldec/spec_funcs.go b/cmd/hcldec/spec_funcs.go
new file mode 100644
index 0000000..99c8ea6
--- /dev/null
+++ b/cmd/hcldec/spec_funcs.go
@@ -0,0 +1,24 @@
+package main
+
+import (
+	"github.com/zclconf/go-cty/cty/function"
+	"github.com/zclconf/go-cty/cty/function/stdlib"
+)
+
+var specFuncs = map[string]function.Function{
+	"abs":        stdlib.AbsoluteFunc,
+	"coalesce":   stdlib.CoalesceFunc,
+	"concat":     stdlib.ConcatFunc,
+	"hasindex":   stdlib.HasIndexFunc,
+	"int":        stdlib.IntFunc,
+	"jsondecode": stdlib.JSONDecodeFunc,
+	"jsonencode": stdlib.JSONEncodeFunc,
+	"length":     stdlib.LengthFunc,
+	"lower":      stdlib.LowerFunc,
+	"max":        stdlib.MaxFunc,
+	"min":        stdlib.MinFunc,
+	"reverse":    stdlib.ReverseFunc,
+	"strlen":     stdlib.StrlenFunc,
+	"substr":     stdlib.SubstrFunc,
+	"upper":      stdlib.UpperFunc,
+}
diff --git a/cmd/hcldec/type_expr.go b/cmd/hcldec/type_expr.go
new file mode 100644
index 0000000..3e36420
--- /dev/null
+++ b/cmd/hcldec/type_expr.go
@@ -0,0 +1,129 @@
+package main
+
+import (
+	"fmt"
+	"reflect"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
+)
+
+var typeType = cty.Capsule("type", reflect.TypeOf(cty.NilType))
+
+var typeEvalCtx = &hcl.EvalContext{
+	Variables: map[string]cty.Value{
+		"string": wrapTypeType(cty.String),
+		"bool":   wrapTypeType(cty.Bool),
+		"number": wrapTypeType(cty.Number),
+		"any":    wrapTypeType(cty.DynamicPseudoType),
+	},
+	Functions: map[string]function.Function{
+		"list": function.New(&function.Spec{
+			Params: []function.Parameter{
+				{
+					Name: "element_type",
+					Type: typeType,
+				},
+			},
+			Type: function.StaticReturnType(typeType),
+			Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+				ety := unwrapTypeType(args[0])
+				ty := cty.List(ety)
+				return wrapTypeType(ty), nil
+			},
+		}),
+		"set": function.New(&function.Spec{
+			Params: []function.Parameter{
+				{
+					Name: "element_type",
+					Type: typeType,
+				},
+			},
+			Type: function.StaticReturnType(typeType),
+			Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+				ety := unwrapTypeType(args[0])
+				ty := cty.Set(ety)
+				return wrapTypeType(ty), nil
+			},
+		}),
+		"map": function.New(&function.Spec{
+			Params: []function.Parameter{
+				{
+					Name: "element_type",
+					Type: typeType,
+				},
+			},
+			Type: function.StaticReturnType(typeType),
+			Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+				ety := unwrapTypeType(args[0])
+				ty := cty.Map(ety)
+				return wrapTypeType(ty), nil
+			},
+		}),
+		"tuple": function.New(&function.Spec{
+			Params: []function.Parameter{
+				{
+					Name: "element_types",
+					Type: cty.List(typeType),
+				},
+			},
+			Type: function.StaticReturnType(typeType),
+			Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+				etysVal := args[0]
+				etys := make([]cty.Type, 0, etysVal.LengthInt())
+				for it := etysVal.ElementIterator(); it.Next(); {
+					_, wrapEty := it.Element()
+					etys = append(etys, unwrapTypeType(wrapEty))
+				}
+				ty := cty.Tuple(etys)
+				return wrapTypeType(ty), nil
+			},
+		}),
+		"object": function.New(&function.Spec{
+			Params: []function.Parameter{
+				{
+					Name: "attribute_types",
+					Type: cty.Map(typeType),
+				},
+			},
+			Type: function.StaticReturnType(typeType),
+			Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+				atysVal := args[0]
+				atys := make(map[string]cty.Type)
+				for it := atysVal.ElementIterator(); it.Next(); {
+					nameVal, wrapAty := it.Element()
+					name := nameVal.AsString()
+					atys[name] = unwrapTypeType(wrapAty)
+				}
+				ty := cty.Object(atys)
+				return wrapTypeType(ty), nil
+			},
+		}),
+	},
+}
+
+func evalTypeExpr(expr hcl.Expression) (cty.Type, hcl.Diagnostics) {
+	result, diags := expr.Value(typeEvalCtx)
+	if result.IsNull() {
+		return cty.DynamicPseudoType, diags
+	}
+	if !result.Type().Equals(typeType) {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Invalid type expression",
+			Detail:   fmt.Sprintf("A type is required, not %s.", result.Type().FriendlyName()),
+		})
+		return cty.DynamicPseudoType, diags
+	}
+
+	return unwrapTypeType(result), diags
+}
+
+func wrapTypeType(ty cty.Type) cty.Value {
+	return cty.CapsuleVal(typeType, &ty)
+}
+
+func unwrapTypeType(val cty.Value) cty.Type {
+	return *(val.EncapsulatedValue().(*cty.Type))
+}
diff --git a/cmd/hcldec/vars.go b/cmd/hcldec/vars.go
new file mode 100644
index 0000000..d3971cb
--- /dev/null
+++ b/cmd/hcldec/vars.go
@@ -0,0 +1,74 @@
+package main
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+func parseVarsArg(src string, argIdx int) (map[string]cty.Value, hcl.Diagnostics) {
+	fakeFn := fmt.Sprintf("<vars argument %d>", argIdx)
+	f, diags := parser.ParseJSON([]byte(src), fakeFn)
+	if f == nil {
+		return nil, diags
+	}
+	vals, valsDiags := parseVarsBody(f.Body)
+	diags = append(diags, valsDiags...)
+	return vals, diags
+}
+
+func parseVarsFile(filename string) (map[string]cty.Value, hcl.Diagnostics) {
+	var f *hcl.File
+	var diags hcl.Diagnostics
+
+	if strings.HasSuffix(filename, ".json") {
+		f, diags = parser.ParseJSONFile(filename)
+	} else {
+		f, diags = parser.ParseHCLFile(filename)
+	}
+
+	if f == nil {
+		return nil, diags
+	}
+
+	vals, valsDiags := parseVarsBody(f.Body)
+	diags = append(diags, valsDiags...)
+	return vals, diags
+}
+
+func parseVarsBody(body hcl.Body) (map[string]cty.Value, hcl.Diagnostics) {
+	attrs, diags := body.JustAttributes()
+	if attrs == nil {
+		return nil, diags
+	}
+
+	vals := make(map[string]cty.Value, len(attrs))
+	for name, attr := range attrs {
+		val, valDiags := attr.Expr.Value(nil)
+		diags = append(diags, valDiags...)
+		vals[name] = val
+	}
+	return vals, diags
+}
+
+// varSpecs is an implementation of pflag.Value that accumulates a list of
+// raw values, ignoring any quoting. This is similar to pflag.StringSlice
+// but does not complain if there are literal quotes inside the value, which
+// is important for us to accept JSON literals here.
+type varSpecs []string
+
+func (vs *varSpecs) String() string {
+	return strings.Join([]string(*vs), ", ")
+}
+
+func (vs *varSpecs) Set(new string) error {
+	*vs = append(*vs, new)
+	return nil
+}
+
+func (vs *varSpecs) Type() string {
+	return "json-or-file"
+}
diff --git a/cmd/hclfmt/main.go b/cmd/hclfmt/main.go
new file mode 100644
index 0000000..01a8d41
--- /dev/null
+++ b/cmd/hclfmt/main.go
@@ -0,0 +1,148 @@
+package main
+
+import (
+	"bytes"
+	"errors"
+	"flag"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"strings"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hclparse"
+	"github.com/hashicorp/hcl/v2/hclwrite"
+	"golang.org/x/crypto/ssh/terminal"
+)
+
+const versionStr = "0.0.1-dev"
+
+var (
+	check       = flag.Bool("check", false, "perform a syntax check on the given files and produce diagnostics")
+	reqNoChange = flag.Bool("require-no-change", false, "return a non-zero status if any files are changed during formatting")
+	overwrite   = flag.Bool("w", false, "overwrite source files instead of writing to stdout")
+	showVersion = flag.Bool("version", false, "show the version number and immediately exit")
+)
+
+var parser = hclparse.NewParser()
+var diagWr hcl.DiagnosticWriter // initialized in init
+var checkErrs = false
+var changed []string
+
+func init() {
+	color := terminal.IsTerminal(int(os.Stderr.Fd()))
+	w, _, err := terminal.GetSize(int(os.Stdout.Fd()))
+	if err != nil {
+		w = 80
+	}
+	diagWr = hcl.NewDiagnosticTextWriter(os.Stderr, parser.Files(), uint(w), color)
+}
+
+func main() {
+	err := realmain()
+
+	if err != nil {
+		fmt.Fprintln(os.Stderr, err.Error())
+		os.Exit(1)
+	}
+}
+
+func realmain() error {
+	flag.Usage = usage
+	flag.Parse()
+
+	if *showVersion {
+		fmt.Println(versionStr)
+		return nil
+	}
+
+	err := processFiles()
+	if err != nil {
+		return err
+	}
+
+	if checkErrs {
+		return errors.New("one or more files contained errors")
+	}
+
+	if *reqNoChange {
+		if len(changed) != 0 {
+			return fmt.Errorf("file(s) were changed: %s", strings.Join(changed, ", "))
+		}
+	}
+
+	return nil
+}
+
+func processFiles() error {
+	if flag.NArg() == 0 {
+		if *overwrite {
+			return errors.New("error: cannot use -w without source filenames")
+		}
+
+		return processFile("<stdin>", os.Stdin)
+	}
+
+	for i := 0; i < flag.NArg(); i++ {
+		path := flag.Arg(i)
+		switch dir, err := os.Stat(path); {
+		case err != nil:
+			return err
+		case dir.IsDir():
+			// This tool can't walk a whole directory because it doesn't
+			// know what file naming schemes will be used by different
+			// HCL-embedding applications, so it'll leave that sort of
+			// functionality for apps themselves to implement.
+			return fmt.Errorf("can't format directory %s", path)
+		default:
+			if err := processFile(path, nil); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
+func processFile(fn string, in *os.File) error {
+	var err error
+	if in == nil {
+		in, err = os.Open(fn)
+		if err != nil {
+			return fmt.Errorf("failed to open %s: %s", fn, err)
+		}
+	}
+
+	inSrc, err := ioutil.ReadAll(in)
+	if err != nil {
+		return fmt.Errorf("failed to read %s: %s", fn, err)
+	}
+
+	if *check {
+		_, diags := parser.ParseHCL(inSrc, fn)
+		diagWr.WriteDiagnostics(diags)
+		if diags.HasErrors() {
+			checkErrs = true
+			return nil
+		}
+	}
+
+	outSrc := hclwrite.Format(inSrc)
+
+	if !bytes.Equal(inSrc, outSrc) {
+		changed = append(changed, fn)
+	}
+
+	if *overwrite {
+		return ioutil.WriteFile(fn, outSrc, 0644)
+	}
+
+	_, err = os.Stdout.Write(outSrc)
+	return err
+}
+
+func usage() {
+	fmt.Fprintf(os.Stderr, "usage: hclfmt [flags] [path ...]\n")
+	flag.PrintDefaults()
+	os.Exit(2)
+}
diff --git a/cmd/hclspecsuite/README.md b/cmd/hclspecsuite/README.md
new file mode 100644
index 0000000..0f7badc
--- /dev/null
+++ b/cmd/hclspecsuite/README.md
@@ -0,0 +1,4 @@
+# `hclspecsuite`
+
+`hclspecsuite` is the test harness for
+[the HCL specification test suite](../../specsuite/README.md).
diff --git a/cmd/hclspecsuite/diagnostics.go b/cmd/hclspecsuite/diagnostics.go
new file mode 100644
index 0000000..129fdd2
--- /dev/null
+++ b/cmd/hclspecsuite/diagnostics.go
@@ -0,0 +1,108 @@
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"github.com/hashicorp/hcl/v2"
+)
+
+func decodeJSONDiagnostics(src []byte) hcl.Diagnostics {
+	type PosJSON struct {
+		Line   int `json:"line"`
+		Column int `json:"column"`
+		Byte   int `json:"byte"`
+	}
+	type RangeJSON struct {
+		Filename string  `json:"filename"`
+		Start    PosJSON `json:"start"`
+		End      PosJSON `json:"end"`
+	}
+	type DiagnosticJSON struct {
+		Severity string     `json:"severity"`
+		Summary  string     `json:"summary"`
+		Detail   string     `json:"detail,omitempty"`
+		Subject  *RangeJSON `json:"subject,omitempty"`
+	}
+	type DiagnosticsJSON struct {
+		Diagnostics []DiagnosticJSON `json:"diagnostics"`
+	}
+
+	var raw DiagnosticsJSON
+	var diags hcl.Diagnostics
+	err := json.Unmarshal(src, &raw)
+	if err != nil {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Failed to parse hcldec diagnostics result",
+			Detail:   fmt.Sprintf("Sub-program hcldec produced invalid diagnostics: %s.", err),
+		})
+		return diags
+	}
+
+	if len(raw.Diagnostics) == 0 {
+		return nil
+	}
+
+	diags = make(hcl.Diagnostics, 0, len(raw.Diagnostics))
+	for _, rawDiag := range raw.Diagnostics {
+		var severity hcl.DiagnosticSeverity
+		switch rawDiag.Severity {
+		case "error":
+			severity = hcl.DiagError
+		case "warning":
+			severity = hcl.DiagWarning
+		default:
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Failed to parse hcldec diagnostics result",
+				Detail:   fmt.Sprintf("Diagnostic has unsupported severity %q.", rawDiag.Severity),
+			})
+			continue
+		}
+
+		diag := &hcl.Diagnostic{
+			Severity: severity,
+			Summary:  rawDiag.Summary,
+			Detail:   rawDiag.Detail,
+		}
+		if rawDiag.Subject != nil {
+			rawRange := rawDiag.Subject
+			diag.Subject = &hcl.Range{
+				Filename: rawRange.Filename,
+				Start: hcl.Pos{
+					Line:   rawRange.Start.Line,
+					Column: rawRange.Start.Column,
+					Byte:   rawRange.Start.Byte,
+				},
+				End: hcl.Pos{
+					Line:   rawRange.End.Line,
+					Column: rawRange.End.Column,
+					Byte:   rawRange.End.Byte,
+				},
+			}
+		}
+		diags = append(diags, diag)
+	}
+
+	return diags
+}
+
+func severityString(severity hcl.DiagnosticSeverity) string {
+	switch severity {
+	case hcl.DiagError:
+		return "error"
+	case hcl.DiagWarning:
+		return "warning"
+	default:
+		return "unsupported-severity"
+	}
+}
+
+func rangeString(rng hcl.Range) string {
+	return fmt.Sprintf(
+		"from line %d column %d byte %d to line %d column %d byte %d",
+		rng.Start.Line, rng.Start.Column, rng.Start.Byte,
+		rng.End.Line, rng.End.Column, rng.End.Byte,
+	)
+}
diff --git a/cmd/hclspecsuite/log.go b/cmd/hclspecsuite/log.go
new file mode 100644
index 0000000..022c7be
--- /dev/null
+++ b/cmd/hclspecsuite/log.go
@@ -0,0 +1,8 @@
+package main
+
+import (
+	"github.com/hashicorp/hcl/v2"
+)
+
+type LogBeginCallback func(testName string, testFile *TestFile)
+type LogProblemsCallback func(testName string, testFile *TestFile, diags hcl.Diagnostics)
diff --git a/cmd/hclspecsuite/main.go b/cmd/hclspecsuite/main.go
new file mode 100644
index 0000000..384ee02
--- /dev/null
+++ b/cmd/hclspecsuite/main.go
@@ -0,0 +1,71 @@
+package main
+
+import (
+	"fmt"
+	"os"
+	"os/exec"
+
+	"golang.org/x/crypto/ssh/terminal"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hclparse"
+)
+
+func main() {
+	os.Exit(realMain(os.Args[1:]))
+}
+
+func realMain(args []string) int {
+	if len(args) != 2 {
+		fmt.Fprintf(os.Stderr, "Usage: hclspecsuite <tests-dir> <hcldec-file>\n")
+		return 2
+	}
+
+	testsDir := args[0]
+	hcldecPath := args[1]
+
+	hcldecPath, err := exec.LookPath(hcldecPath)
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "%s\n", err)
+		return 2
+	}
+
+	parser := hclparse.NewParser()
+
+	color := terminal.IsTerminal(int(os.Stderr.Fd()))
+	w, _, err := terminal.GetSize(int(os.Stdout.Fd()))
+	if err != nil {
+		w = 80
+	}
+	diagWr := hcl.NewDiagnosticTextWriter(os.Stderr, parser.Files(), uint(w), color)
+	var diagCount int
+
+	runner := &Runner{
+		parser:     parser,
+		hcldecPath: hcldecPath,
+		baseDir:    testsDir,
+		logBegin: func(name string, file *TestFile) {
+			fmt.Printf("- %s\n", name)
+		},
+		logProblems: func(name string, file *TestFile, diags hcl.Diagnostics) {
+			if len(diags) != 0 {
+				os.Stderr.WriteString("\n")
+				diagWr.WriteDiagnostics(diags)
+				diagCount += len(diags)
+			}
+			fmt.Printf("- %s\n", name)
+		},
+	}
+	diags := runner.Run()
+
+	if len(diags) != 0 {
+		os.Stderr.WriteString("\n\n\n== Test harness problems:\n\n")
+		diagWr.WriteDiagnostics(diags)
+		diagCount += len(diags)
+	}
+
+	if diagCount > 0 {
+		return 2
+	}
+	return 0
+}
diff --git a/cmd/hclspecsuite/runner.go b/cmd/hclspecsuite/runner.go
new file mode 100644
index 0000000..db9b13f
--- /dev/null
+++ b/cmd/hclspecsuite/runner.go
@@ -0,0 +1,524 @@
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"github.com/zclconf/go-cty-debug/ctydebug"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/convert"
+	ctyjson "github.com/zclconf/go-cty/cty/json"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/ext/typeexpr"
+	"github.com/hashicorp/hcl/v2/hclparse"
+)
+
+type Runner struct {
+	parser      *hclparse.Parser
+	hcldecPath  string
+	baseDir     string
+	logBegin    LogBeginCallback
+	logProblems LogProblemsCallback
+}
+
+func (r *Runner) Run() hcl.Diagnostics {
+	return r.runDir(r.baseDir)
+}
+
+func (r *Runner) runDir(dir string) hcl.Diagnostics {
+	var diags hcl.Diagnostics
+
+	infos, err := ioutil.ReadDir(dir)
+	if err != nil {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Failed to read test directory",
+			Detail:   fmt.Sprintf("The directory %q could not be opened: %s.", dir, err),
+		})
+		return diags
+	}
+
+	var tests []string
+	var subDirs []string
+	for _, info := range infos {
+		name := info.Name()
+		if strings.HasPrefix(name, ".") {
+			continue
+		}
+
+		if info.IsDir() {
+			subDirs = append(subDirs, name)
+		}
+		if strings.HasSuffix(name, ".t") {
+			tests = append(tests, name)
+		}
+	}
+	sort.Strings(tests)
+	sort.Strings(subDirs)
+
+	for _, filename := range tests {
+		filename = filepath.Join(dir, filename)
+		testDiags := r.runTest(filename)
+		diags = append(diags, testDiags...)
+	}
+
+	for _, dirName := range subDirs {
+		dir := filepath.Join(dir, dirName)
+		dirDiags := r.runDir(dir)
+		diags = append(diags, dirDiags...)
+	}
+
+	return diags
+}
+
+func (r *Runner) runTest(filename string) hcl.Diagnostics {
+	prettyName := r.prettyTestName(filename)
+	tf, diags := r.LoadTestFile(filename)
+	if diags.HasErrors() {
+		// We'll still log, so it's clearer which test the diagnostics belong to.
+		if r.logBegin != nil {
+			r.logBegin(prettyName, nil)
+		}
+		if r.logProblems != nil {
+			r.logProblems(prettyName, nil, diags)
+			return nil // don't duplicate the diagnostics we already reported
+		}
+		return diags
+	}
+
+	if r.logBegin != nil {
+		r.logBegin(prettyName, tf)
+	}
+
+	basePath := filename[:len(filename)-2]
+	specFilename := basePath + ".hcldec"
+	nativeFilename := basePath + ".hcl"
+	jsonFilename := basePath + ".hcl.json"
+
+	// We'll add the source code of the spec file to our own parser, even
+	// though it'll actually be parsed by the hcldec child process, since that
+	// way we can produce nice diagnostic messages if hcldec fails to process
+	// the spec file.
+	src, err := ioutil.ReadFile(specFilename)
+	if err == nil {
+		r.parser.AddFile(specFilename, &hcl.File{
+			Bytes: src,
+		})
+	}
+
+	if _, err := os.Stat(specFilename); err != nil {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing .hcldec file",
+			Detail:   fmt.Sprintf("No specification file for test %s: %s.", prettyName, err),
+		})
+		return diags
+	}
+
+	if _, err := os.Stat(nativeFilename); err == nil {
+		moreDiags := r.runTestInput(specFilename, nativeFilename, tf)
+		diags = append(diags, moreDiags...)
+	}
+
+	if _, err := os.Stat(jsonFilename); err == nil {
+		moreDiags := r.runTestInput(specFilename, jsonFilename, tf)
+		diags = append(diags, moreDiags...)
+	}
+
+	if r.logProblems != nil {
+		r.logProblems(prettyName, nil, diags)
+		return nil // don't duplicate the diagnostics we already reported
+	}
+
+	return diags
+}
+
+func (r *Runner) runTestInput(specFilename, inputFilename string, tf *TestFile) hcl.Diagnostics {
+	// We'll add the source code of the input file to our own parser, even
+	// though it'll actually be parsed by the hcldec child process, since that
+	// way we can produce nice diagnostic messages if hcldec fails to process
+	// the input file.
+	src, err := ioutil.ReadFile(inputFilename)
+	if err == nil {
+		r.parser.AddFile(inputFilename, &hcl.File{
+			Bytes: src,
+		})
+	}
+
+	var diags hcl.Diagnostics
+
+	if tf.ChecksTraversals {
+		gotTraversals, moreDiags := r.hcldecVariables(specFilename, inputFilename)
+		diags = append(diags, moreDiags...)
+		if !moreDiags.HasErrors() {
+			expected := tf.ExpectedTraversals
+			for _, got := range gotTraversals {
+				e := findTraversalSpec(got, expected)
+				rng := got.SourceRange()
+				if e == nil {
+					diags = append(diags, &hcl.Diagnostic{
+						Severity: hcl.DiagError,
+						Summary:  "Unexpected traversal",
+						Detail:   "Detected traversal that is not indicated as expected in the test file.",
+						Subject:  &rng,
+					})
+				} else {
+					moreDiags := checkTraversalsMatch(got, inputFilename, e)
+					diags = append(diags, moreDiags...)
+				}
+			}
+
+			// Look for any traversals that didn't show up at all.
+			for _, e := range expected {
+				if t := findTraversalForSpec(e, gotTraversals); t == nil {
+					diags = append(diags, &hcl.Diagnostic{
+						Severity: hcl.DiagError,
+						Summary:  "Missing expected traversal",
+						Detail:   "This expected traversal was not detected.",
+						Subject:  e.Traversal.SourceRange().Ptr(),
+					})
+				}
+			}
+		}
+
+	}
+
+	val, transformDiags := r.hcldecTransform(specFilename, inputFilename)
+	if len(tf.ExpectedDiags) == 0 {
+		diags = append(diags, transformDiags...)
+		if transformDiags.HasErrors() {
+			// If hcldec failed then there's no point in continuing.
+			return diags
+		}
+
+		if errs := val.Type().TestConformance(tf.ResultType); len(errs) > 0 {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Incorrect result type",
+				Detail: fmt.Sprintf(
+					"Input file %s produced %s, but was expecting %s.",
+					inputFilename, typeexpr.TypeString(val.Type()), typeexpr.TypeString(tf.ResultType),
+				),
+			})
+		}
+
+		if tf.Result != cty.NilVal {
+			cmpVal, err := convert.Convert(tf.Result, tf.ResultType)
+			if err != nil {
+				diags = append(diags, &hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Incorrect type for result value",
+					Detail: fmt.Sprintf(
+						"Result does not conform to the given result type: %s.", err,
+					),
+					Subject: &tf.ResultRange,
+				})
+			} else {
+				if !val.RawEquals(cmpVal) {
+					diags = append(diags, &hcl.Diagnostic{
+						Severity: hcl.DiagError,
+						Summary:  "Incorrect result value",
+						Detail: fmt.Sprintf(
+							"Input file %s produced %#v, but was expecting %#v.\n\n%s",
+							inputFilename, val, tf.Result,
+							ctydebug.DiffValues(tf.Result, val),
+						),
+					})
+				}
+			}
+		}
+	} else {
+		// We're expecting diagnostics, and so we'll need to correlate the
+		// severities and source ranges of our actual diagnostics against
+		// what we were expecting.
+		type DiagnosticEntry struct {
+			Severity hcl.DiagnosticSeverity
+			Range    hcl.Range
+		}
+		got := make(map[DiagnosticEntry]*hcl.Diagnostic)
+		want := make(map[DiagnosticEntry]hcl.Range)
+		for _, diag := range transformDiags {
+			if diag.Subject == nil {
+				// Sourceless diagnostics can never be expected, so we'll just
+				// pass these through as-is and assume they are hcldec
+				// operational errors.
+				diags = append(diags, diag)
+				continue
+			}
+			if diag.Subject.Filename != inputFilename {
+				// If the problem is for something other than the input file
+				// then it can't be expected.
+				diags = append(diags, diag)
+				continue
+			}
+			entry := DiagnosticEntry{
+				Severity: diag.Severity,
+				Range:    *diag.Subject,
+			}
+			got[entry] = diag
+		}
+		for _, e := range tf.ExpectedDiags {
+			e.Range.Filename = inputFilename // assumed here, since we don't allow any other filename to be expected
+			entry := DiagnosticEntry{
+				Severity: e.Severity,
+				Range:    e.Range,
+			}
+			want[entry] = e.DeclRange
+		}
+
+		for gotEntry, diag := range got {
+			if _, wanted := want[gotEntry]; !wanted {
+				// Pass through the diagnostic itself so the user can see what happened
+				diags = append(diags, diag)
+				diags = append(diags, &hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Unexpected diagnostic",
+					Detail: fmt.Sprintf(
+						"No %s diagnostic was expected %s. The unexpected diagnostic was shown above.",
+						severityString(gotEntry.Severity), rangeString(gotEntry.Range),
+					),
+					Subject: gotEntry.Range.Ptr(),
+				})
+			}
+		}
+
+		for wantEntry, declRange := range want {
+			if _, gotted := got[wantEntry]; !gotted {
+				diags = append(diags, &hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Missing expected diagnostic",
+					Detail: fmt.Sprintf(
+						"No %s diagnostic was generated %s.",
+						severityString(wantEntry.Severity), rangeString(wantEntry.Range),
+					),
+					Subject: declRange.Ptr(),
+				})
+			}
+		}
+	}
+
+	return diags
+}
+
+func (r *Runner) hcldecTransform(specFile, inputFile string) (cty.Value, hcl.Diagnostics) {
+	var diags hcl.Diagnostics
+	var outBuffer bytes.Buffer
+	var errBuffer bytes.Buffer
+
+	cmd := &exec.Cmd{
+		Path: r.hcldecPath,
+		Args: []string{
+			r.hcldecPath,
+			"--spec=" + specFile,
+			"--diags=json",
+			"--with-type",
+			"--keep-nulls",
+			inputFile,
+		},
+		Stdout: &outBuffer,
+		Stderr: &errBuffer,
+	}
+	err := cmd.Run()
+	if err != nil {
+		if _, isExit := err.(*exec.ExitError); !isExit {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Failed to run hcldec",
+				Detail:   fmt.Sprintf("Sub-program hcldec failed to start: %s.", err),
+			})
+			return cty.DynamicVal, diags
+		}
+
+		// If we exited unsuccessfully then we'll expect diagnostics on stderr
+		moreDiags := decodeJSONDiagnostics(errBuffer.Bytes())
+		diags = append(diags, moreDiags...)
+		return cty.DynamicVal, diags
+	} else {
+		// Otherwise, we expect a JSON result value on stdout. Since we used
+		// --with-type above, we can decode as DynamicPseudoType to recover
+		// exactly the type that was saved, without the usual JSON lossiness.
+		val, err := ctyjson.Unmarshal(outBuffer.Bytes(), cty.DynamicPseudoType)
+		if err != nil {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Failed to parse hcldec result",
+				Detail:   fmt.Sprintf("Sub-program hcldec produced an invalid result: %s.", err),
+			})
+			return cty.DynamicVal, diags
+		}
+		return val, diags
+	}
+}
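+
+// For reference, the child process invocation above corresponds to running
+// something like the following on the command line (the file names here are
+// illustrative only):
+//
+//     hcldec --spec=test.hcldec --diags=json --with-type --keep-nulls test.hcl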
+
+func (r *Runner) hcldecVariables(specFile, inputFile string) ([]hcl.Traversal, hcl.Diagnostics) {
+	var diags hcl.Diagnostics
+	var outBuffer bytes.Buffer
+	var errBuffer bytes.Buffer
+
+	cmd := &exec.Cmd{
+		Path: r.hcldecPath,
+		Args: []string{
+			r.hcldecPath,
+			"--spec=" + specFile,
+			"--diags=json",
+			"--var-refs",
+			inputFile,
+		},
+		Stdout: &outBuffer,
+		Stderr: &errBuffer,
+	}
+	err := cmd.Run()
+	if err != nil {
+		if _, isExit := err.(*exec.ExitError); !isExit {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Failed to run hcldec",
+				Detail:   fmt.Sprintf("Sub-program hcldec (evaluating input) failed to start: %s.", err),
+			})
+			return nil, diags
+		}
+
+		// If we exited unsuccessfully then we'll expect diagnostics on stderr
+		moreDiags := decodeJSONDiagnostics(errBuffer.Bytes())
+		diags = append(diags, moreDiags...)
+		return nil, diags
+	} else {
+		// Otherwise, we expect a JSON description of the traversals on stdout.
+		type PosJSON struct {
+			Line   int `json:"line"`
+			Column int `json:"column"`
+			Byte   int `json:"byte"`
+		}
+		type RangeJSON struct {
+			Filename string  `json:"filename"`
+			Start    PosJSON `json:"start"`
+			End      PosJSON `json:"end"`
+		}
+		type StepJSON struct {
+			Kind  string          `json:"kind"`
+			Name  string          `json:"name,omitempty"`
+			Key   json.RawMessage `json:"key,omitempty"`
+			Range RangeJSON       `json:"range"`
+		}
+		type TraversalJSON struct {
+			Steps []StepJSON `json:"steps"`
+		}
+
+		var raw []TraversalJSON
+		err := json.Unmarshal(outBuffer.Bytes(), &raw)
+		if err != nil {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Failed to parse hcldec result",
+				Detail:   fmt.Sprintf("Sub-program hcldec (with --var-refs) produced an invalid result: %s.", err),
+			})
+			return nil, diags
+		}
+
+		var ret []hcl.Traversal
+		if len(raw) == 0 {
+			return ret, diags
+		}
+
+		ret = make([]hcl.Traversal, 0, len(raw))
+		for _, rawT := range raw {
+			traversal := make(hcl.Traversal, 0, len(rawT.Steps))
+			for _, rawS := range rawT.Steps {
+				rng := hcl.Range{
+					Filename: rawS.Range.Filename,
+					Start: hcl.Pos{
+						Line:   rawS.Range.Start.Line,
+						Column: rawS.Range.Start.Column,
+						Byte:   rawS.Range.Start.Byte,
+					},
+					End: hcl.Pos{
+						Line:   rawS.Range.End.Line,
+						Column: rawS.Range.End.Column,
+						Byte:   rawS.Range.End.Byte,
+					},
+				}
+
+				switch rawS.Kind {
+
+				case "root":
+					traversal = append(traversal, hcl.TraverseRoot{
+						Name:     rawS.Name,
+						SrcRange: rng,
+					})
+
+				case "attr":
+					traversal = append(traversal, hcl.TraverseAttr{
+						Name:     rawS.Name,
+						SrcRange: rng,
+					})
+
+				case "index":
+					ty, err := ctyjson.ImpliedType([]byte(rawS.Key))
+					if err != nil {
+						diags = append(diags, &hcl.Diagnostic{
+							Severity: hcl.DiagError,
+							Summary:  "Failed to parse hcldec result",
+							Detail:   fmt.Sprintf("Sub-program hcldec (with --var-refs) produced an invalid result: traversal step has invalid index key %s.", rawS.Key),
+						})
+						return nil, diags
+					}
+					keyVal, err := ctyjson.Unmarshal([]byte(rawS.Key), ty)
+					if err != nil {
+						diags = append(diags, &hcl.Diagnostic{
+							Severity: hcl.DiagError,
+							Summary:  "Failed to parse hcldec result",
+							Detail:   fmt.Sprintf("Sub-program hcldec (with --var-refs) produced a result with an invalid index key %s: %s.", rawS.Key, err),
+						})
+						return nil, diags
+					}
+
+					traversal = append(traversal, hcl.TraverseIndex{
+						Key:      keyVal,
+						SrcRange: rng,
+					})
+
+				default:
+					// Should never happen with a correct hcldec, but we'll
+					// catch it gracefully since this result is coming from a
+					// possibly-buggy hcldec implementation that we're testing.
+					diags = append(diags, &hcl.Diagnostic{
+						Severity: hcl.DiagError,
+						Summary:  "Failed to parse hcldec result",
+						Detail:   fmt.Sprintf("Sub-program hcldec (with --var-refs) produced an invalid result: traversal step of unsupported kind %q.", rawS.Kind),
+					})
+					return nil, diags
+				}
+			}
+
+			ret = append(ret, traversal)
+		}
+		return ret, diags
+	}
+}
+
+func (r *Runner) prettyDirName(dir string) string {
+	rel, err := filepath.Rel(r.baseDir, dir)
+	if err != nil {
+		return filepath.ToSlash(dir)
+	}
+	return filepath.ToSlash(rel)
+}
+
+func (r *Runner) prettyTestName(filename string) string {
+	dir := filepath.Dir(filename)
+	dirName := r.prettyDirName(dir)
+	filename = filepath.Base(filename)
+	testName := filename[:len(filename)-2]
+	if dirName == "." {
+		return testName
+	}
+	return fmt.Sprintf("%s/%s", dirName, testName)
+}
diff --git a/cmd/hclspecsuite/test_file.go b/cmd/hclspecsuite/test_file.go
new file mode 100644
index 0000000..57ed35e
--- /dev/null
+++ b/cmd/hclspecsuite/test_file.go
@@ -0,0 +1,350 @@
+package main
+
+import (
+	"fmt"
+
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/convert"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/ext/typeexpr"
+	"github.com/hashicorp/hcl/v2/gohcl"
+)
+
+type TestFile struct {
+	Result     cty.Value
+	ResultType cty.Type
+
+	ChecksTraversals   bool
+	ExpectedTraversals []*TestFileExpectTraversal
+
+	ExpectedDiags []*TestFileExpectDiag
+
+	ResultRange     hcl.Range
+	ResultTypeRange hcl.Range
+}
+
+type TestFileExpectTraversal struct {
+	Traversal hcl.Traversal
+	Range     hcl.Range
+	DeclRange hcl.Range
+}
+
+type TestFileExpectDiag struct {
+	Severity  hcl.DiagnosticSeverity
+	Range     hcl.Range
+	DeclRange hcl.Range
+}
+
+func (r *Runner) LoadTestFile(filename string) (*TestFile, hcl.Diagnostics) {
+	f, diags := r.parser.ParseHCLFile(filename)
+	if diags.HasErrors() {
+		return nil, diags
+	}
+
+	content, moreDiags := f.Body.Content(testFileSchema)
+	diags = append(diags, moreDiags...)
+	if moreDiags.HasErrors() {
+		return nil, diags
+	}
+
+	ret := &TestFile{
+		ResultType: cty.DynamicPseudoType,
+	}
+
+	if typeAttr, exists := content.Attributes["result_type"]; exists {
+		ty, moreDiags := typeexpr.TypeConstraint(typeAttr.Expr)
+		diags = append(diags, moreDiags...)
+		if !moreDiags.HasErrors() {
+			ret.ResultType = ty
+		}
+		ret.ResultTypeRange = typeAttr.Expr.Range()
+	}
+
+	if resultAttr, exists := content.Attributes["result"]; exists {
+		resultVal, moreDiags := resultAttr.Expr.Value(nil)
+		diags = append(diags, moreDiags...)
+		if !moreDiags.HasErrors() {
+			resultVal, err := convert.Convert(resultVal, ret.ResultType)
+			if err != nil {
+				diags = diags.Append(&hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Invalid result value",
+					Detail:   fmt.Sprintf("The result value does not conform to the given result type: %s.", err),
+					Subject:  resultAttr.Expr.Range().Ptr(),
+				})
+			} else {
+				ret.Result = resultVal
+			}
+		}
+		ret.ResultRange = resultAttr.Expr.Range()
+	}
+
+	for _, block := range content.Blocks {
+		switch block.Type {
+
+		case "traversals":
+			if ret.ChecksTraversals {
+				// Indicates a duplicate traversals block
+				diags = diags.Append(&hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Duplicate \"traversals\" block",
+					Detail:   "Only one traversals block is expected.",
+					Subject:  &block.TypeRange,
+				})
+				continue
+			}
+			expectTraversals, moreDiags := r.decodeTraversalsBlock(block)
+			diags = append(diags, moreDiags...)
+			if !moreDiags.HasErrors() {
+				ret.ChecksTraversals = true
+				ret.ExpectedTraversals = expectTraversals
+			}
+
+		case "diagnostics":
+			if len(ret.ExpectedDiags) > 0 {
+				// Indicates a duplicate diagnostics block
+				diags = diags.Append(&hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Duplicate \"diagnostics\" block",
+					Detail:   "Only one diagnostics block is expected.",
+					Subject:  &block.TypeRange,
+				})
+				continue
+			}
+			expectDiags, moreDiags := r.decodeDiagnosticsBlock(block)
+			diags = append(diags, moreDiags...)
+			ret.ExpectedDiags = expectDiags
+
+		default:
+			// Shouldn't get here, because the above cases are exhaustive for
+			// our test file schema.
+			panic(fmt.Sprintf("unsupported block type %q", block.Type))
+		}
+	}
+
+	if ret.Result != cty.NilVal && len(ret.ExpectedDiags) > 0 {
+		diags = diags.Append(&hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Conflicting spec expectations",
+			Detail:   "This test spec includes expected diagnostics, so it may not also include an expected result.",
+			Subject:  &content.Attributes["result"].Range,
+		})
+	}
+
+	return ret, diags
+}
+
+func (r *Runner) decodeTraversalsBlock(block *hcl.Block) ([]*TestFileExpectTraversal, hcl.Diagnostics) {
+	var diags hcl.Diagnostics
+
+	content, moreDiags := block.Body.Content(testFileTraversalsSchema)
+	diags = append(diags, moreDiags...)
+	if moreDiags.HasErrors() {
+		return nil, diags
+	}
+
+	var ret []*TestFileExpectTraversal
+	for _, block := range content.Blocks {
+		// There's only one block type in our schema, so we can assume all
+		// blocks are of that type.
+		expectTraversal, moreDiags := r.decodeTraversalExpectBlock(block)
+		diags = append(diags, moreDiags...)
+		if expectTraversal != nil {
+			ret = append(ret, expectTraversal)
+		}
+	}
+
+	return ret, diags
+}
+
+func (r *Runner) decodeTraversalExpectBlock(block *hcl.Block) (*TestFileExpectTraversal, hcl.Diagnostics) {
+	var diags hcl.Diagnostics
+
+	rng, body, moreDiags := r.decodeRangeFromBody(block.Body)
+	diags = append(diags, moreDiags...)
+
+	content, moreDiags := body.Content(testFileTraversalExpectSchema)
+	diags = append(diags, moreDiags...)
+	if moreDiags.HasErrors() {
+		return nil, diags
+	}
+
+	var traversal hcl.Traversal
+	{
+		refAttr := content.Attributes["ref"]
+		traversal, moreDiags = hcl.AbsTraversalForExpr(refAttr.Expr)
+		diags = append(diags, moreDiags...)
+		if moreDiags.HasErrors() {
+			return nil, diags
+		}
+	}
+
+	return &TestFileExpectTraversal{
+		Traversal: traversal,
+		Range:     rng,
+		DeclRange: block.DefRange,
+	}, diags
+}
+
+func (r *Runner) decodeDiagnosticsBlock(block *hcl.Block) ([]*TestFileExpectDiag, hcl.Diagnostics) {
+	var diags hcl.Diagnostics
+
+	content, moreDiags := block.Body.Content(testFileDiagnosticsSchema)
+	diags = append(diags, moreDiags...)
+	if moreDiags.HasErrors() {
+		return nil, diags
+	}
+
+	if len(content.Blocks) == 0 {
+		diags = diags.Append(&hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Empty diagnostics block",
+			Detail:   "If a diagnostics block is present, at least one expectation statement (\"error\" or \"warning\" block) must be included.",
+			Subject:  &block.TypeRange,
+		})
+		return nil, diags
+	}
+
+	ret := make([]*TestFileExpectDiag, 0, len(content.Blocks))
+	for _, block := range content.Blocks {
+		rng, remain, moreDiags := r.decodeRangeFromBody(block.Body)
+		diags = append(diags, moreDiags...)
+		if diags.HasErrors() {
+			continue
+		}
+
+		// Should have nothing else in the block aside from the range definition.
+		_, moreDiags = remain.Content(&hcl.BodySchema{})
+		diags = append(diags, moreDiags...)
+
+		var severity hcl.DiagnosticSeverity
+		switch block.Type {
+		case "error":
+			severity = hcl.DiagError
+		case "warning":
+			severity = hcl.DiagWarning
+		default:
+			panic(fmt.Sprintf("unsupported block type %q", block.Type))
+		}
+
+		ret = append(ret, &TestFileExpectDiag{
+			Severity:  severity,
+			Range:     rng,
+			DeclRange: block.TypeRange,
+		})
+	}
+	return ret, diags
+}
+
+func (r *Runner) decodeRangeFromBody(body hcl.Body) (hcl.Range, hcl.Body, hcl.Diagnostics) {
+	type RawPos struct {
+		Line   int `hcl:"line"`
+		Column int `hcl:"column"`
+		Byte   int `hcl:"byte"`
+	}
+	type RawRange struct {
+		From   RawPos   `hcl:"from,block"`
+		To     RawPos   `hcl:"to,block"`
+		Remain hcl.Body `hcl:",remain"`
+	}
+
+	var raw RawRange
+	diags := gohcl.DecodeBody(body, nil, &raw)
+
+	return hcl.Range{
+		// We intentionally omit Filename here, because the test spec doesn't
+		// need to specify that explicitly: we can infer it to be the file
+		// path we pass to hcldec.
+		Start: hcl.Pos{
+			Line:   raw.From.Line,
+			Column: raw.From.Column,
+			Byte:   raw.From.Byte,
+		},
+		End: hcl.Pos{
+			Line:   raw.To.Line,
+			Column: raw.To.Column,
+			Byte:   raw.To.Byte,
+		},
+	}, raw.Remain, diags
+}
+
+var testFileSchema = &hcl.BodySchema{
+	Attributes: []hcl.AttributeSchema{
+		{
+			Name: "result",
+		},
+		{
+			Name: "result_type",
+		},
+	},
+	Blocks: []hcl.BlockHeaderSchema{
+		{
+			Type: "traversals",
+		},
+		{
+			Type: "diagnostics",
+		},
+	},
+}
+
+var testFileTraversalsSchema = &hcl.BodySchema{
+	Blocks: []hcl.BlockHeaderSchema{
+		{
+			Type: "expect",
+		},
+	},
+}
+
+var testFileTraversalExpectSchema = &hcl.BodySchema{
+	Attributes: []hcl.AttributeSchema{
+		{
+			Name:     "ref",
+			Required: true,
+		},
+	},
+	Blocks: []hcl.BlockHeaderSchema{
+		{
+			Type: "range",
+		},
+	},
+}
+
+var testFileDiagnosticsSchema = &hcl.BodySchema{
+	Blocks: []hcl.BlockHeaderSchema{
+		{
+			Type: "error",
+		},
+		{
+			Type: "warning",
+		},
+	},
+}
+
+var testFileRangeSchema = &hcl.BodySchema{
+	Blocks: []hcl.BlockHeaderSchema{
+		{
+			Type: "from",
+		},
+		{
+			Type: "to",
+		},
+	},
+}
+
+var testFilePosSchema = &hcl.BodySchema{
+	Attributes: []hcl.AttributeSchema{
+		{
+			Name:     "line",
+			Required: true,
+		},
+		{
+			Name:     "column",
+			Required: true,
+		},
+		{
+			Name:     "byte",
+			Required: true,
+		},
+	},
+}
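+
+// For reference, a hypothetical test spec that exercises the schemas above;
+// the shape is inferred from the decode functions in this file and the
+// values are illustrative only:
+//
+//     result      = "hello"
+//     result_type = string
+//
+//     traversals {
+//       expect {
+//         ref = var.greeting
+//
+//         from {
+//           line   = 1
+//           column = 8
+//           byte   = 7
+//         }
+//         to {
+//           line   = 1
+//           column = 20
+//           byte   = 19
+//         }
+//       }
+//     }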
diff --git a/cmd/hclspecsuite/traversals.go b/cmd/hclspecsuite/traversals.go
new file mode 100644
index 0000000..d689390
--- /dev/null
+++ b/cmd/hclspecsuite/traversals.go
@@ -0,0 +1,117 @@
+package main
+
+import (
+	"fmt"
+	"reflect"
+
+	"github.com/hashicorp/hcl/v2"
+)
+
+func findTraversalSpec(got hcl.Traversal, candidates []*TestFileExpectTraversal) *TestFileExpectTraversal {
+	for _, candidate := range candidates {
+		if traversalsAreEquivalent(candidate.Traversal, got) {
+			return candidate
+		}
+	}
+	return nil
+}
+
+func findTraversalForSpec(want *TestFileExpectTraversal, have []hcl.Traversal) hcl.Traversal {
+	for _, candidate := range have {
+		if traversalsAreEquivalent(candidate, want.Traversal) {
+			return candidate
+		}
+	}
+	return nil
+}
+
+func traversalsAreEquivalent(a, b hcl.Traversal) bool {
+	if len(a) != len(b) {
+		return false
+	}
+	for i := range a {
+		aStep := a[i]
+		bStep := b[i]
+
+		if reflect.TypeOf(aStep) != reflect.TypeOf(bStep) {
+			return false
+		}
+
+		// We can now assume that both are of the same type.
+		switch ts := aStep.(type) {
+
+		case hcl.TraverseRoot:
+			if bStep.(hcl.TraverseRoot).Name != ts.Name {
+				return false
+			}
+
+		case hcl.TraverseAttr:
+			if bStep.(hcl.TraverseAttr).Name != ts.Name {
+				return false
+			}
+
+		case hcl.TraverseIndex:
+			if !bStep.(hcl.TraverseIndex).Key.RawEquals(ts.Key) {
+				return false
+			}
+
+		default:
+			return false
+		}
+	}
+	return true
+}
+
+// checkTraversalsMatch determines if a given traversal matches the given
+// expectation, which must've been produced by an earlier call to
+// findTraversalSpec for the same traversal.
+func checkTraversalsMatch(got hcl.Traversal, filename string, match *TestFileExpectTraversal) hcl.Diagnostics {
+	var diags hcl.Diagnostics
+
+	gotRng := got.SourceRange()
+	wantRng := match.Range
+
+	if got, want := gotRng.Filename, filename; got != want {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Incorrect filename in detected traversal",
+			Detail: fmt.Sprintf(
+				"Filename was reported as %q, but was expecting %q.",
+				got, want,
+			),
+			Subject: match.Traversal.SourceRange().Ptr(),
+		})
+		return diags
+	}
+
+	// If we have the expected filename then we'll use that to construct the
+	// full "want range" here so that we can use it to point to the appropriate
+	// location in the remaining diagnostics.
+	wantRng.Filename = filename
+
+	if got, want := gotRng.Start, wantRng.Start; got != want {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Incorrect start position in detected traversal",
+			Detail: fmt.Sprintf(
+				"Start position was reported as line %d column %d byte %d, but was expecting line %d column %d byte %d.",
+				got.Line, got.Column, got.Byte,
+				want.Line, want.Column, want.Byte,
+			),
+			Subject: &wantRng,
+		})
+	}
+	if got, want := gotRng.End, wantRng.End; got != want {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Incorrect end position in detected traversal",
+			Detail: fmt.Sprintf(
+				"End position was reported as line %d column %d byte %d, but was expecting line %d column %d byte %d.",
+				got.Line, got.Column, got.Byte,
+				want.Line, want.Column, want.Byte,
+			),
+			Subject: &wantRng,
+		})
+	}
+	return diags
+}
diff --git a/diagnostic.go b/diagnostic.go
new file mode 100644
index 0000000..bcf4eb3
--- /dev/null
+++ b/diagnostic.go
@@ -0,0 +1,186 @@
+package hcl
+
+import (
+	"fmt"
+)
+
+// DiagnosticSeverity represents the severity of a diagnostic.
+type DiagnosticSeverity int
+
+const (
+	// DiagInvalid is the invalid zero value of DiagnosticSeverity
+	DiagInvalid DiagnosticSeverity = iota
+
+	// DiagError indicates that the problem reported by a diagnostic prevents
+	// further progress in parsing and/or evaluating the subject.
+	DiagError
+
+	// DiagWarning indicates that the problem reported by a diagnostic warrants
+	// user attention but does not prevent further progress. It is most
+	// commonly used for showing deprecation notices.
+	DiagWarning
+)
+
+// Diagnostic represents information to be presented to a user about an
+// error or anomaly in parsing or evaluating configuration.
+type Diagnostic struct {
+	Severity DiagnosticSeverity
+
+	// Summary and Detail contain the English-language description of the
+	// problem. Summary is a terse description of the general problem and
+	// detail is a more elaborate, often-multi-sentence description of
+	// the problem and what might be done to solve it.
+	Summary string
+	Detail  string
+
+	// Subject and Context are both source ranges relating to the diagnostic.
+	//
+	// Subject is a tight range referring to exactly the construct that
+	// is problematic, while Context is an optional broader range (which should
+	// fully contain Subject) that ought to be shown around Subject when
+	// generating isolated source-code snippets in diagnostic messages.
+	// If Context is nil, the Subject is also the Context.
+	//
+	// Some diagnostics have no source ranges at all. If Context is set then
+	// Subject should always also be set.
+	Subject *Range
+	Context *Range
+
+	// For diagnostics that occur when evaluating an expression, Expression
+	// may refer to that expression and EvalContext may point to the
+	// EvalContext that was active when evaluating it. This may allow for the
+	// inclusion of additional useful information when rendering a diagnostic
+	// message to the user.
+	//
+	// It is not always possible to select a single EvalContext for a
+	// diagnostic, and so in some cases this field may be nil even when an
+	// expression causes a problem.
+	//
+	// EvalContexts form a tree, so the given EvalContext may refer to a parent
+	// which in turn refers to another parent, etc. For a full picture of all
+	// of the active variables and functions the caller must walk up this
+	// chain, preferring definitions that are "closer" to the expression in
+	// case of colliding names.
+	Expression  Expression
+	EvalContext *EvalContext
+
+	// Extra is an extension point for additional machine-readable information
+	// about this problem.
+	//
+	// Recipients of diagnostic objects may type-assert this value with
+	// specific interface types they know about to discover if any additional
+	// information is available that is interesting for their use-case.
+	//
+	// Extra is always considered to be optional extra information and so a
+	// diagnostic message should still always be fully described (from the
+	// perspective of a human who understands the language the messages are
+	// written in) by the other fields in case a particular recipient has no
+	// special handling for the extra information.
+	//
+	// Functions that return diagnostics with Extra populated should typically
+	// document that they place values implementing a particular interface,
+	// rather than a concrete type, and define that interface such that its
+	// methods can dynamically indicate a lack of support at runtime even
+	// if the interface happens to be statically available. An Extra
+	// type that wraps other Extra values should additionally implement
+	// interface DiagnosticExtraUnwrapper to return the value it is wrapping
+	// so that callers can access inner values to type-assert against.
+	Extra interface{}
+}
+
+// Diagnostics is a list of Diagnostic instances.
+type Diagnostics []*Diagnostic
+
+// error implementation, so that diagnostics can be returned via APIs
+// that normally deal in vanilla Go errors.
+//
+// This presents only minimal context about the error, for compatibility
+// with usual expectations about how errors will present as strings.
+func (d *Diagnostic) Error() string {
+	return fmt.Sprintf("%s: %s; %s", d.Subject, d.Summary, d.Detail)
+}
+
+// error implementation, so that sets of diagnostics can be returned via
+// APIs that normally deal in vanilla Go errors.
+func (d Diagnostics) Error() string {
+	count := len(d)
+	switch {
+	case count == 0:
+		return "no diagnostics"
+	case count == 1:
+		return d[0].Error()
+	default:
+		return fmt.Sprintf("%s, and %d other diagnostic(s)", d[0].Error(), count-1)
+	}
+}
+
+// Append appends a new diagnostic to a Diagnostics and returns the whole Diagnostics.
+//
+// This is provided as a convenience for returning from a function that
+// collects and then returns a set of diagnostics:
+//
+//     return nil, diags.Append(&hcl.Diagnostic{ ... })
+//
+// Note that this may modify the array underlying the diagnostics slice, so
+// it must be used carefully within a single codepath. It is incorrect (and
+// rude) to extend a Diagnostics created by a different subsystem.
+func (d Diagnostics) Append(diag *Diagnostic) Diagnostics {
+	return append(d, diag)
+}
+
+// Extend concatenates the given Diagnostics with the receiver and returns
+// the whole new Diagnostics.
+//
+// This is similar to Append but accepts multiple diagnostics to add. It has
+// all the same caveats and constraints.
+func (d Diagnostics) Extend(diags Diagnostics) Diagnostics {
+	return append(d, diags...)
+}
+
+// HasErrors returns true if the receiver contains any diagnostics of
+// severity DiagError.
+func (d Diagnostics) HasErrors() bool {
+	for _, diag := range d {
+		if diag.Severity == DiagError {
+			return true
+		}
+	}
+	return false
+}
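+
+// As an illustrative, caller-side sketch of the collect-and-check pattern
+// these helpers support (the parser value and file name are assumptions for
+// the example only):
+//
+//     var diags hcl.Diagnostics
+//     f, moreDiags := parser.ParseHCLFile("config.hcl")
+//     diags = append(diags, moreDiags...)
+//     if diags.HasErrors() {
+//         return nil, diags
+//     }
+//     // ... otherwise go on to decode f.Body ...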
+
+func (d Diagnostics) Errs() []error {
+	var errs []error
+	for _, diag := range d {
+		if diag.Severity == DiagError {
+			errs = append(errs, diag)
+		}
+	}
+
+	return errs
+}
+
+// A DiagnosticWriter emits diagnostics somehow.
+type DiagnosticWriter interface {
+	WriteDiagnostic(*Diagnostic) error
+	WriteDiagnostics(Diagnostics) error
+}
+
+// DiagnosticExtraUnwrapper is an interface implemented by values in the
+// Extra field of Diagnostic when they are wrapping another "Extra" value that
+// was generated downstream.
+//
+// Diagnostic recipients which want to examine "Extra" values to sniff for
+// particular types of extra data can either type-assert this interface
+// directly and repeatedly unwrap until they receive nil, or can use the
+// helper function DiagnosticExtra.
+type DiagnosticExtraUnwrapper interface {
+	// If the receiver is wrapping another "diagnostic extra" value, returns
+	// that value. Otherwise returns nil to indicate dynamically that nothing
+	// is wrapped.
+	//
+	// The "nothing is wrapped" condition can be signalled either by this
+	// method returning nil or by a type not implementing this interface at all.
+	//
+	// Implementers should never create unwrap "cycles" where a nested extra
+	// value returns a value that was also wrapping it.
+	UnwrapDiagnosticExtra() interface{}
+}
diff --git a/diagnostic_text.go b/diagnostic_text.go
new file mode 100644
index 0000000..0b4a262
--- /dev/null
+++ b/diagnostic_text.go
@@ -0,0 +1,311 @@
+package hcl
+
+import (
+	"bufio"
+	"bytes"
+	"errors"
+	"fmt"
+	"io"
+	"sort"
+
+	wordwrap "github.com/mitchellh/go-wordwrap"
+	"github.com/zclconf/go-cty/cty"
+)
+
+type diagnosticTextWriter struct {
+	files map[string]*File
+	wr    io.Writer
+	width uint
+	color bool
+}
+
+// NewDiagnosticTextWriter creates a DiagnosticWriter that writes diagnostics
+// to the given writer as formatted text.
+//
+// It is designed to produce text appropriate to print in a monospaced font
+// in a terminal of a particular width, or optionally with no width limit.
+//
+// The given width may be zero to disable word-wrapping of the detail text
+// and truncation of source code snippets.
+//
+// If color is set to true, the output will include VT100 escape sequences to
+// color-code the severity indicators. It is suggested to turn this off if
+// the target writer is not a terminal.
+func NewDiagnosticTextWriter(wr io.Writer, files map[string]*File, width uint, color bool) DiagnosticWriter {
+	return &diagnosticTextWriter{
+		files: files,
+		wr:    wr,
+		width: width,
+		color: color,
+	}
+}
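+
+// A minimal caller-side sketch of wiring up the writer (the parser value,
+// file name, and width are assumptions for illustration only):
+//
+//     parser := hclparse.NewParser()
+//     f, diags := parser.ParseHCLFile("config.hcl")
+//     wr := hcl.NewDiagnosticTextWriter(os.Stderr, parser.Files(), 78, true)
+//     if len(diags) != 0 {
+//         wr.WriteDiagnostics(diags)
+//     }
+//     // ... otherwise go on to decode f.Body ...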
+
+func (w *diagnosticTextWriter) WriteDiagnostic(diag *Diagnostic) error {
+	if diag == nil {
+		return errors.New("nil diagnostic")
+	}
+
+	var colorCode, highlightCode, resetCode string
+	if w.color {
+		switch diag.Severity {
+		case DiagError:
+			colorCode = "\x1b[31m"
+		case DiagWarning:
+			colorCode = "\x1b[33m"
+		}
+		resetCode = "\x1b[0m"
+		highlightCode = "\x1b[1;4m"
+	}
+
+	var severityStr string
+	switch diag.Severity {
+	case DiagError:
+		severityStr = "Error"
+	case DiagWarning:
+		severityStr = "Warning"
+	default:
+		// should never happen
+		severityStr = "???????"
+	}
+
+	fmt.Fprintf(w.wr, "%s%s%s: %s\n\n", colorCode, severityStr, resetCode, diag.Summary)
+
+	if diag.Subject != nil {
+		snipRange := *diag.Subject
+		highlightRange := snipRange
+		if diag.Context != nil {
+			// Show enough of the source code to include both the subject
+			// and context ranges, which overlap in all reasonable
+			// situations.
+			snipRange = RangeOver(snipRange, *diag.Context)
+		}
+		// We can't illustrate an empty range, so we'll turn such ranges into
+		// single-character ranges, which might not be totally valid (may point
+		// off the end of a line, or off the end of the file) but are good
+		// enough for the bounds checks we do below.
+		if snipRange.Empty() {
+			snipRange.End.Byte++
+			snipRange.End.Column++
+		}
+		if highlightRange.Empty() {
+			highlightRange.End.Byte++
+			highlightRange.End.Column++
+		}
+
+		file := w.files[diag.Subject.Filename]
+		if file == nil || file.Bytes == nil {
+			fmt.Fprintf(w.wr, "  on %s line %d:\n  (source code not available)\n\n", diag.Subject.Filename, diag.Subject.Start.Line)
+		} else {
+
+			var contextLine string
+			if diag.Subject != nil {
+				contextLine = contextString(file, diag.Subject.Start.Byte)
+				if contextLine != "" {
+					contextLine = ", in " + contextLine
+				}
+			}
+
+			fmt.Fprintf(w.wr, "  on %s line %d%s:\n", diag.Subject.Filename, diag.Subject.Start.Line, contextLine)
+
+			src := file.Bytes
+			sc := NewRangeScanner(src, diag.Subject.Filename, bufio.ScanLines)
+
+			for sc.Scan() {
+				lineRange := sc.Range()
+				if !lineRange.Overlaps(snipRange) {
+					continue
+				}
+
+				beforeRange, highlightedRange, afterRange := lineRange.PartitionAround(highlightRange)
+				if highlightedRange.Empty() {
+					fmt.Fprintf(w.wr, "%4d: %s\n", lineRange.Start.Line, sc.Bytes())
+				} else {
+					before := beforeRange.SliceBytes(src)
+					highlighted := highlightedRange.SliceBytes(src)
+					after := afterRange.SliceBytes(src)
+					fmt.Fprintf(
+						w.wr, "%4d: %s%s%s%s%s\n",
+						lineRange.Start.Line,
+						before,
+						highlightCode, highlighted, resetCode,
+						after,
+					)
+				}
+
+			}
+
+			w.wr.Write([]byte{'\n'})
+		}
+
+		if diag.Expression != nil && diag.EvalContext != nil {
+			// We will attempt to render the values for any variables
+			// referenced in the given expression as additional context, for
+			// situations where the same expression is evaluated multiple
+			// times in different scopes.
+			expr := diag.Expression
+			ctx := diag.EvalContext
+
+			vars := expr.Variables()
+			stmts := make([]string, 0, len(vars))
+			seen := make(map[string]struct{}, len(vars))
+			for _, traversal := range vars {
+				val, diags := traversal.TraverseAbs(ctx)
+				if diags.HasErrors() {
+					// Skip anything that generates errors, since we probably
+					// already have the same error in our diagnostics set
+					// already.
+					continue
+				}
+
+				traversalStr := w.traversalStr(traversal)
+				if _, exists := seen[traversalStr]; exists {
+					continue // don't show duplicates when the same variable is referenced multiple times
+				}
+				switch {
+				case !val.IsKnown():
+					// Can't say anything about this yet, then.
+					continue
+				case val.IsNull():
+					stmts = append(stmts, fmt.Sprintf("%s set to null", traversalStr))
+				default:
+					stmts = append(stmts, fmt.Sprintf("%s as %s", traversalStr, w.valueStr(val)))
+				}
+				seen[traversalStr] = struct{}{}
+			}
+
+			sort.Strings(stmts) // FIXME: Should maybe use a traversal-aware sort that can sort numeric indexes properly?
+			last := len(stmts) - 1
+
+			for i, stmt := range stmts {
+				switch i {
+				case 0:
+					w.wr.Write([]byte{'w', 'i', 't', 'h', ' '})
+				default:
+					w.wr.Write([]byte{' ', ' ', ' ', ' ', ' '})
+				}
+				w.wr.Write([]byte(stmt))
+				switch i {
+				case last:
+					w.wr.Write([]byte{'.', '\n', '\n'})
+				default:
+					w.wr.Write([]byte{',', '\n'})
+				}
+			}
+		}
+	}
+
+	if diag.Detail != "" {
+		detail := diag.Detail
+		if w.width != 0 {
+			detail = wordwrap.WrapString(detail, w.width)
+		}
+		fmt.Fprintf(w.wr, "%s\n\n", detail)
+	}
+
+	return nil
+}
+
+func (w *diagnosticTextWriter) WriteDiagnostics(diags Diagnostics) error {
+	for _, diag := range diags {
+		err := w.WriteDiagnostic(diag)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func (w *diagnosticTextWriter) traversalStr(traversal Traversal) string {
+	// This is a specialized subset of traversal rendering tailored to
+	// producing helpful contextual messages in diagnostics. It is not
+	// comprehensive nor intended to be used for other purposes.
+
+	var buf bytes.Buffer
+	for _, step := range traversal {
+		switch tStep := step.(type) {
+		case TraverseRoot:
+			buf.WriteString(tStep.Name)
+		case TraverseAttr:
+			buf.WriteByte('.')
+			buf.WriteString(tStep.Name)
+		case TraverseIndex:
+			buf.WriteByte('[')
+			if keyTy := tStep.Key.Type(); keyTy.IsPrimitiveType() {
+				buf.WriteString(w.valueStr(tStep.Key))
+			} else {
+				// We'll just use a placeholder for more complex values,
+				// since otherwise our result could grow ridiculously long.
+				buf.WriteString("...")
+			}
+			buf.WriteByte(']')
+		}
+	}
+	return buf.String()
+}
+
+func (w *diagnosticTextWriter) valueStr(val cty.Value) string {
+	// This is a specialized subset of value rendering tailored to producing
+	// helpful but concise messages in diagnostics. It is not comprehensive
+	// nor intended to be used for other purposes.
+
+	ty := val.Type()
+	switch {
+	case val.IsNull():
+		return "null"
+	case !val.IsKnown():
+		// Should never happen here because we should filter before we get
+		// in here, but we'll do something reasonable rather than panic.
+		return "(not yet known)"
+	case ty == cty.Bool:
+		if val.True() {
+			return "true"
+		}
+		return "false"
+	case ty == cty.Number:
+		bf := val.AsBigFloat()
+		return bf.Text('g', 10)
+	case ty == cty.String:
+		// Go string syntax is not exactly the same as HCL native string syntax,
+		// but we'll accept the minor edge-cases where this is different here
+		// for now, just to get something reasonable here.
+		return fmt.Sprintf("%q", val.AsString())
+	case ty.IsCollectionType() || ty.IsTupleType():
+		l := val.LengthInt()
+		switch l {
+		case 0:
+			return "empty " + ty.FriendlyName()
+		case 1:
+			return ty.FriendlyName() + " with 1 element"
+		default:
+			return fmt.Sprintf("%s with %d elements", ty.FriendlyName(), l)
+		}
+	case ty.IsObjectType():
+		atys := ty.AttributeTypes()
+		l := len(atys)
+		switch l {
+		case 0:
+			return "object with no attributes"
+		case 1:
+			var name string
+			for k := range atys {
+				name = k
+			}
+			return fmt.Sprintf("object with 1 attribute %q", name)
+		default:
+			return fmt.Sprintf("object with %d attributes", l)
+		}
+	default:
+		return ty.FriendlyName()
+	}
+}
+
+func contextString(file *File, offset int) string {
+	type contextStringer interface {
+		ContextString(offset int) string
+	}
+
+	if cser, ok := file.Nav.(contextStringer); ok {
+		return cser.ContextString(offset)
+	}
+	return ""
+}
diff --git a/diagnostic_text_test.go b/diagnostic_text_test.go
new file mode 100644
index 0000000..8757fbe
--- /dev/null
+++ b/diagnostic_text_test.go
@@ -0,0 +1,235 @@
+package hcl
+
+import (
+	"bytes"
+	"fmt"
+	"testing"
+
+	"github.com/zclconf/go-cty/cty"
+)
+
+func TestDiagnosticTextWriter(t *testing.T) {
+	tests := []struct {
+		Input *Diagnostic
+		Want  string
+	}{
+		{
+			&Diagnostic{
+				Severity: DiagError,
+				Summary:  "Splines not reticulated",
+				Detail:   "All splines must be pre-reticulated.",
+				Subject: &Range{
+					Start: Pos{
+						Byte:   0,
+						Column: 1,
+						Line:   1,
+					},
+					End: Pos{
+						Byte:   3,
+						Column: 4,
+						Line:   1,
+					},
+				},
+			},
+			`Error: Splines not reticulated
+
+  on  line 1, in hardcoded-context:
+   1: foo = 1
+
+All splines must be pre-reticulated.
+
+`,
+		},
+		{
+			&Diagnostic{
+				Severity: DiagError,
+				Summary:  "Unsupported attribute",
+				Detail:   `"baz" is not a supported top-level attribute. Did you mean "bam"?`,
+				Subject: &Range{
+					Start: Pos{
+						Byte:   16,
+						Column: 1,
+						Line:   3,
+					},
+					End: Pos{
+						Byte:   19,
+						Column: 4,
+						Line:   3,
+					},
+				},
+			},
+			`Error: Unsupported attribute
+
+  on  line 3, in hardcoded-context:
+   3: baz = 3
+
+"baz" is not a supported top-level
+attribute. Did you mean "bam"?
+
+`,
+		},
+		{
+			&Diagnostic{
+				Severity: DiagError,
+				Summary:  "Unsupported attribute",
+				Detail:   `"pizza" is not a supported attribute. Did you mean "pizzetta"?`,
+				Subject: &Range{
+					Start: Pos{
+						Byte:   42,
+						Column: 3,
+						Line:   5,
+					},
+					End: Pos{
+						Byte:   47,
+						Column: 8,
+						Line:   5,
+					},
+				},
+				// This is actually not a great example of a context, but is here to test
+				// whether we're able to show a multi-line context when needed.
+				Context: &Range{
+					Start: Pos{
+						Byte:   24,
+						Column: 1,
+						Line:   4,
+					},
+					End: Pos{
+						Byte:   60,
+						Column: 2,
+						Line:   6,
+					},
+				},
+			},
+			`Error: Unsupported attribute
+
+  on  line 5, in hardcoded-context:
+   4: block "party" {
+   5:   pizza = "cheese"
+   6: }
+
+"pizza" is not a supported attribute.
+Did you mean "pizzetta"?
+
+`,
+		},
+		{
+			&Diagnostic{
+				Severity: DiagError,
+				Summary:  "Test of including relevant variable values",
+				Detail:   `This diagnostic includes an expression and an evalcontext.`,
+				Subject: &Range{
+					Start: Pos{
+						Byte:   42,
+						Column: 3,
+						Line:   5,
+					},
+					End: Pos{
+						Byte:   47,
+						Column: 8,
+						Line:   5,
+					},
+				},
+				Expression: &diagnosticTestExpr{
+					vars: []Traversal{
+						{
+							TraverseRoot{
+								Name: "foo",
+							},
+						},
+						{
+							TraverseRoot{
+								Name: "bar",
+							},
+							TraverseAttr{
+								Name: "baz",
+							},
+						},
+						{
+							TraverseRoot{
+								Name: "missing",
+							},
+						},
+						{
+							TraverseRoot{
+								Name: "boz",
+							},
+						},
+					},
+				},
+				EvalContext: &EvalContext{
+					parent: &EvalContext{
+						Variables: map[string]cty.Value{
+							"foo": cty.StringVal("foo value"),
+						},
+					},
+					Variables: map[string]cty.Value{
+						"bar": cty.ObjectVal(map[string]cty.Value{
+							"baz": cty.ListValEmpty(cty.String),
+						}),
+						"boz":    cty.NumberIntVal(5),
+						"unused": cty.True,
+					},
+				},
+			},
+			`Error: Test of including relevant variable values
+
+  on  line 5, in hardcoded-context:
+   5:   pizza = "cheese"
+
+with bar.baz as empty list of string,
+     boz as 5,
+     foo as "foo value".
+
+This diagnostic includes an expression
+and an evalcontext.
+
+`,
+		},
+	}
+
+	files := map[string]*File{
+		"": &File{
+			Bytes: []byte(testDiagnosticTextWriterSource),
+			Nav:   &diagnosticTestNav{},
+		},
+	}
+
+	for i, test := range tests {
+		t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) {
+			bwr := &bytes.Buffer{}
+			dwr := NewDiagnosticTextWriter(bwr, files, 40, false)
+			err := dwr.WriteDiagnostic(test.Input)
+			if err != nil {
+				t.Fatalf("unexpected error: %s", err)
+			}
+			got := bwr.String()
+			if got != test.Want {
+				t.Errorf("wrong result\n\ngot:\n%swant:\n%s", got, test.Want)
+			}
+		})
+	}
+}
+
+const testDiagnosticTextWriterSource = `foo = 1
+bar = 2
+baz = 3
+block "party" {
+  pizza = "cheese"
+}
+`
+
+type diagnosticTestNav struct {
+}
+
+func (tn *diagnosticTestNav) ContextString(offset int) string {
+	return "hardcoded-context"
+}
+
+type diagnosticTestExpr struct {
+	vars []Traversal
+	staticExpr
+}
+
+func (e *diagnosticTestExpr) Variables() []Traversal {
+	return e.vars
+}
diff --git a/diagnostic_typeparams.go b/diagnostic_typeparams.go
new file mode 100644
index 0000000..6994e23
--- /dev/null
+++ b/diagnostic_typeparams.go
@@ -0,0 +1,39 @@
+//go:build go1.18
+// +build go1.18
+
+package hcl
+
+// This file contains additional diagnostics-related symbols that use the
+// Go 1.18 type parameters syntax and would therefore be incompatible with
+// Go 1.17 and earlier.
+
+// DiagnosticExtra attempts to retrieve an "extra value" of type T from the
+// given diagnostic, if either the diag.Extra field directly contains a value
+// of that type or the value implements DiagnosticExtraUnwrapper and directly
+// or indirectly returns a value of that type.
+//
+// Type T should typically be an interface type, so that code which generates
+// diagnostics can potentially return different implementations of the same
+// interface dynamically as needed.
+//
+// If a value of type T is found, returns that value and true to indicate
+// success. Otherwise, returns the zero value of T and false to indicate
+// failure.
+func DiagnosticExtra[T any](diag *Diagnostic) (T, bool) {
+	extra := diag.Extra
+	var zero T
+
+	for {
+		if ret, ok := extra.(T); ok {
+			return ret, true
+		}
+
+		if unwrap, ok := extra.(DiagnosticExtraUnwrapper); ok {
+			// If our "extra" implements DiagnosticExtraUnwrapper then we'll
+			// unwrap one level and try this again.
+			extra = unwrap.UnwrapDiagnosticExtra()
+		} else {
+			return zero, false
+		}
+	}
+}
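+
+// A caller-side sketch of the intended usage. The FunctionCallExtra
+// interface here is an assumption for illustration only; callers would
+// substitute whatever extra-information interface a diagnostic producer
+// documents:
+//
+//     type FunctionCallExtra interface {
+//         CalledFunctionName() string
+//     }
+//
+//     if extra, ok := hcl.DiagnosticExtra[FunctionCallExtra](diag); ok {
+//         log.Printf("error while calling %q", extra.CalledFunctionName())
+//     }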
diff --git a/didyoumean.go b/didyoumean.go
new file mode 100644
index 0000000..c128334
--- /dev/null
+++ b/didyoumean.go
@@ -0,0 +1,24 @@
+package hcl
+
+import (
+	"github.com/agext/levenshtein"
+)
+
+// nameSuggestion tries to find a name from the given slice of suggested names
+// that is close to the given name and returns it if found. If no suggestion
+// is close enough, returns the empty string.
+//
+// The suggestions are tried in order, so earlier suggestions take precedence
+// if the given string is similar to two or more suggestions.
+//
+// This function is intended to be used with a relatively-small number of
+// suggestions. It's not optimized for hundreds or thousands of them.
+func nameSuggestion(given string, suggestions []string) string {
+	for _, suggestion := range suggestions {
+		dist := levenshtein.Distance(given, suggestion, nil)
+		if dist < 3 { // threshold determined experimentally
+			return suggestion
+		}
+	}
+	return ""
+}
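+
+// For example, a caller building a diagnostic might use it like this (the
+// candidate names here are illustrative only):
+//
+//     if s := nameSuggestion("counnt", []string{"count", "for_each"}); s != "" {
+//         detail += fmt.Sprintf(" Did you mean %q?", s)
+//     }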
diff --git a/doc.go b/doc.go
new file mode 100644
index 0000000..0d43fb2
--- /dev/null
+++ b/doc.go
@@ -0,0 +1,34 @@
+// Package hcl contains the main modelling types and general utility functions
+// for HCL.
+//
+// For a simple entry point into HCL, see the package in the subdirectory
+// "hclsimple", which has an opinionated function Decode that can decode HCL
+// configurations in either native HCL syntax or JSON syntax into a Go struct
+// type:
+//
+//     package main
+//
+//     import (
+//     	"log"
+//     	"github.com/hashicorp/hcl/v2/hclsimple"
+//     )
+//
+//     type Config struct {
+//     	LogLevel string `hcl:"log_level"`
+//     }
+//
+//     func main() {
+//     	var config Config
+//     	err := hclsimple.DecodeFile("config.hcl", nil, &config)
+//     	if err != nil {
+//     		log.Fatalf("Failed to load configuration: %s", err)
+//     	}
+//     	log.Printf("Configuration is %#v", config)
+//     }
+//
+// If your application needs more control over the evaluation of the
+// configuration, you can use the functions in the subdirectories hclparse,
+// gohcl, hcldec, etc. Splitting the handling of configuration into multiple
+// phases allows for advanced patterns such as allowing expressions in one
+// part of the configuration to refer to data defined in another part.
+package hcl
diff --git a/eval_context.go b/eval_context.go
new file mode 100644
index 0000000..915910a
--- /dev/null
+++ b/eval_context.go
@@ -0,0 +1,25 @@
+package hcl
+
+import (
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
+)
+
+// An EvalContext provides the variables and functions that should be used
+// to evaluate an expression.
+type EvalContext struct {
+	Variables map[string]cty.Value
+	Functions map[string]function.Function
+	parent    *EvalContext
+}
+
+// NewChild returns a new EvalContext that is a child of the receiver.
+func (ctx *EvalContext) NewChild() *EvalContext {
+	return &EvalContext{parent: ctx}
+}
+
+// Parent returns the parent of the receiver, or nil if the receiver has
+// no parent.
+func (ctx *EvalContext) Parent() *EvalContext {
+	return ctx.parent
+}
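+
+// A caller-side sketch of building a context for evaluation (the variable
+// name, the function name, and the use of the go-cty stdlib package are
+// assumptions for illustration only):
+//
+//     ctx := &hcl.EvalContext{
+//         Variables: map[string]cty.Value{
+//             "name": cty.StringVal("world"),
+//         },
+//         Functions: map[string]function.Function{
+//             "upper": stdlib.UpperFunc,
+//         },
+//     }
+//     val, diags := expr.Value(ctx)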
diff --git a/expr_call.go b/expr_call.go
new file mode 100644
index 0000000..6963fba
--- /dev/null
+++ b/expr_call.go
@@ -0,0 +1,46 @@
+package hcl
+
+// ExprCall tests if the given expression is a function call and,
+// if so, extracts the function name and the expressions that represent
+// the arguments. If the given expression is not statically a function call,
+// error diagnostics are returned.
+//
+// A particular Expression implementation can support this function by
+// offering a method called ExprCall that takes no arguments and returns
+// *StaticCall. This method should return nil if a static call cannot
+// be extracted.  Alternatively, an implementation can support
+// UnwrapExpression to delegate handling of this function to a wrapped
+// Expression object.
+func ExprCall(expr Expression) (*StaticCall, Diagnostics) {
+	type exprCall interface {
+		ExprCall() *StaticCall
+	}
+
+	physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
+		_, supported := expr.(exprCall)
+		return supported
+	})
+
+	if exC, supported := physExpr.(exprCall); supported {
+		if call := exC.ExprCall(); call != nil {
+			return call, nil
+		}
+	}
+	return nil, Diagnostics{
+		&Diagnostic{
+			Severity: DiagError,
+			Summary:  "Invalid expression",
+			Detail:   "A static function call is required.",
+			Subject:  expr.StartRange().Ptr(),
+		},
+	}
+}
+
+// StaticCall represents a function call that was extracted statically from
+// an expression using ExprCall.
+type StaticCall struct {
+	Name      string
+	NameRange Range
+	Arguments []Expression
+	ArgsRange Range
+}
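+
+// A caller-side sketch of static call analysis (the expression source shown
+// in the comment is an assumption for illustration only):
+//
+//     // given an expression whose source is:  min(1, 2)
+//     call, diags := hcl.ExprCall(expr)
+//     if !diags.HasErrors() {
+//         fmt.Println(call.Name, len(call.Arguments)) // prints "min 2"
+//     }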
diff --git a/expr_list.go b/expr_list.go
new file mode 100644
index 0000000..d05cca0
--- /dev/null
+++ b/expr_list.go
@@ -0,0 +1,37 @@
+package hcl
+
+// ExprList tests if the given expression is a static list construct and,
+// if so, extracts the expressions that represent the list elements.
+// If the given expression is not a static list, error diagnostics are
+// returned.
+//
+// A particular Expression implementation can support this function by
+// offering a method called ExprList that takes no arguments and returns
+// []Expression. This method should return nil if a static list cannot
+// be extracted.  Alternatively, an implementation can support
+// UnwrapExpression to delegate handling of this function to a wrapped
+// Expression object.
+func ExprList(expr Expression) ([]Expression, Diagnostics) {
+	type exprList interface {
+		ExprList() []Expression
+	}
+
+	physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
+		_, supported := expr.(exprList)
+		return supported
+	})
+
+	if exL, supported := physExpr.(exprList); supported {
+		if list := exL.ExprList(); list != nil {
+			return list, nil
+		}
+	}
+	return nil, Diagnostics{
+		&Diagnostic{
+			Severity: DiagError,
+			Summary:  "Invalid expression",
+			Detail:   "A static list expression is required.",
+			Subject:  expr.StartRange().Ptr(),
+		},
+	}
+}
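+
+// A caller-side sketch (the attribute and its expression source are
+// assumptions for illustration only):
+//
+//     // given an attribute whose expression source is:  ["a", "b", "c"]
+//     exprs, diags := hcl.ExprList(attr.Expr)
+//     if !diags.HasErrors() {
+//         for _, e := range exprs {
+//             // each e is one element expression, still unevaluated
+//         }
+//     }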
diff --git a/expr_map.go b/expr_map.go
new file mode 100644
index 0000000..96d1ce4
--- /dev/null
+++ b/expr_map.go
@@ -0,0 +1,44 @@
+package hcl
+
+// ExprMap tests if the given expression is a static map construct and,
+// if so, extracts the expressions that represent the map elements.
+// If the given expression is not a static map, error diagnostics are
+// returned.
+//
+// A particular Expression implementation can support this function by
+// offering a method called ExprMap that takes no arguments and returns
+// []KeyValuePair. This method should return nil if a static map cannot
+// be extracted.  Alternatively, an implementation can support
+// UnwrapExpression to delegate handling of this function to a wrapped
+// Expression object.
+func ExprMap(expr Expression) ([]KeyValuePair, Diagnostics) {
+	type exprMap interface {
+		ExprMap() []KeyValuePair
+	}
+
+	physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
+		_, supported := expr.(exprMap)
+		return supported
+	})
+
+	if exM, supported := physExpr.(exprMap); supported {
+		if pairs := exM.ExprMap(); pairs != nil {
+			return pairs, nil
+		}
+	}
+	return nil, Diagnostics{
+		&Diagnostic{
+			Severity: DiagError,
+			Summary:  "Invalid expression",
+			Detail:   "A static map expression is required.",
+			Subject:  expr.StartRange().Ptr(),
+		},
+	}
+}
+
+// KeyValuePair represents a pair of expressions that serve as a single item
+// within a map or object definition construct.
+type KeyValuePair struct {
+	Key   Expression
+	Value Expression
+}
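+
+// A caller-side sketch (the attribute and its expression source are
+// assumptions for illustration only):
+//
+//     // given an attribute whose expression source is:  { a = 1, b = 2 }
+//     pairs, diags := hcl.ExprMap(attr.Expr)
+//     if !diags.HasErrors() {
+//         for _, pair := range pairs {
+//             // pair.Key and pair.Value remain unevaluated expressions
+//         }
+//     }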
diff --git a/expr_unwrap.go b/expr_unwrap.go
new file mode 100644
index 0000000..6d5d205
--- /dev/null
+++ b/expr_unwrap.go
@@ -0,0 +1,68 @@
+package hcl
+
+type unwrapExpression interface {
+	UnwrapExpression() Expression
+}
+
+// UnwrapExpression removes any "wrapper" expressions from the given expression,
+// to recover the representation of the physical expression given in source
+// code.
+//
+// Sometimes wrapping expressions are used to modify expression behavior, e.g.
+// in extensions that need to make some local variables available to certain
+// sub-trees of the configuration. This can make it difficult to reliably
+// type-assert on the physical AST types used by the underlying syntax.
+//
+// Unwrapping an expression may modify its behavior by stripping away any
+// additional constraints or capabilities being applied to the Value and
+// Variables methods, so this function should generally only be used prior
+// to operations that concern themselves with the static syntax of the input
+// configuration, and not with the effective value of the expression.
+//
+// Wrapper expression types must support unwrapping by implementing a method
+// called UnwrapExpression that takes no arguments and returns the embedded
+// Expression. Implementations of this method should peel away only one level
+// of wrapping, if multiple are present. This method may return nil to
+// indicate _dynamically_ that no wrapped expression is available, for
+// expression types that might only behave as wrappers in certain cases.
+func UnwrapExpression(expr Expression) Expression {
+	for {
+		unwrap, wrapped := expr.(unwrapExpression)
+		if !wrapped {
+			return expr
+		}
+		innerExpr := unwrap.UnwrapExpression()
+		if innerExpr == nil {
+			return expr
+		}
+		expr = innerExpr
+	}
+}
+
+// UnwrapExpressionUntil is similar to UnwrapExpression except it gives the
+// caller an opportunity to test each level of unwrapping to see whether a
+// particular expression is accepted.
+//
+// This could be used, for example, to unwrap until a particular other
+// interface is satisfied, regardless of which wrapping level it is satisfied
+// at.
+//
+// The given callback function must return false to continue unwrapping, or
+// true to accept and return the proposed expression given. If the callback
+// function rejects even the final, physical expression then the result of
+// this function is nil.
+func UnwrapExpressionUntil(expr Expression, until func(Expression) bool) Expression {
+	for {
+		if until(expr) {
+			return expr
+		}
+		unwrap, wrapped := expr.(unwrapExpression)
+		if !wrapped {
+			return nil
+		}
+		expr = unwrap.UnwrapExpression()
+		if expr == nil {
+			return nil
+		}
+	}
+}
diff --git a/ext/README.md b/ext/README.md
new file mode 100644
index 0000000..f7f2bc9
--- /dev/null
+++ b/ext/README.md
@@ -0,0 +1,9 @@
+# HCL Extensions
+
+This directory contains some packages implementing some extensions to HCL
+that add features by building on the core API in the main `hcl` package.
+
+These serve as optional language extensions for use-cases that are limited only
+to specific callers. Generally these make the language more expressive at
+the expense of increased dynamic behavior that may be undesirable for
+applications that need to impose more rigid structure on configuration.
diff --git a/ext/customdecode/README.md b/ext/customdecode/README.md
new file mode 100644
index 0000000..1636f57
--- /dev/null
+++ b/ext/customdecode/README.md
@@ -0,0 +1,209 @@
+# HCL Custom Static Decoding Extension
+
+This HCL extension provides a mechanism for defining arguments in an HCL-based
+language whose values are derived using custom decoding rules against the
+HCL expression syntax, overriding the usual behavior of normal expression
+evaluation.
+
+"Arguments", for the purpose of this extension, currently includes the
+following two contexts:
+
+* For applications using `hcldec` for dynamic decoding, a `hcldec.AttrSpec`
+  or `hcldec.BlockAttrsSpec` can be given a special type constraint that
+  opts in to custom decoding behavior for the attribute(s) that are selected
+  by that specification.
+
+* When working with the HCL native expression syntax, a function given in
+  the `hcl.EvalContext` during evaluation can have parameters with special
+  type constraints that opt in to custom decoding behavior for the argument
+  expression associated with that parameter in any call.
+
+The above use-cases are rather abstract, so we'll consider a motivating
+real-world example: sometimes we (language designers) need to allow users
+to specify type constraints directly in the language itself, such as in
+[Terraform's Input Variables](https://www.terraform.io/docs/configuration/variables.html).
+Terraform's `variable` blocks include an argument called `type` which takes
+a type constraint given using HCL expression building-blocks as defined by
+[the HCL `typeexpr` extension](../typeexpr/README.md).
+
+A "type constraint expression" of that sort is not an expression intended to
+be evaluated in the usual way. Instead, the physical expression is
+deconstructed using [the static analysis operations](../../spec.md#static-analysis)
+to produce a `cty.Type` as the result, rather than a `cty.Value`.
+
+The purpose of this Custom Static Decoding Extension, then, is to provide a
+bridge to allow that sort of custom decoding to be used via mechanisms that
+normally deal in `cty.Value`, such as `hcldec` and native syntax function
+calls as listed above.
+
+(Note: [`gohcl`](https://pkg.go.dev/github.com/hashicorp/hcl/v2/gohcl) has
+its own mechanism to support this use case, exploiting the fact that it is
+working directly with "normal" Go types. Decoding into a struct field of
+type `hcl.Expression` obtains the expression directly without evaluating it
+first. The Custom Static Decoding Extension is not necessary for that `gohcl`
+technique. You can also implement custom decoding by working directly with
+the lowest-level HCL API, which separates extraction of and evaluation of
+expressions into two steps.)
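+
+For example, the `gohcl` technique mentioned above might look like the
+following sketch, where the struct and field names are purely illustrative:
+
+```go
+type variableBlock struct {
+    // gohcl assigns the attribute's expression directly to a field of type
+    // hcl.Expression, without evaluating it, so it can be analyzed later.
+    Type hcl.Expression `hcl:"type"`
+}
+```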
+
+## Custom Decoding Types
+
+This extension relies on a convention implemented in terms of
+[_Capsule Types_ in the underlying `cty` type system](https://github.com/zclconf/go-cty/blob/master/docs/types.md#capsule-types). `cty` allows a capsule type to carry arbitrary
+extension metadata values as an aid to creating higher-level abstractions like
+this extension.
+
+A custom argument decoding mode, then, is implemented by creating a new `cty`
+capsule type that implements the `ExtensionData` custom operation to return
+a decoding function when requested. For example:
+
+```go
+var keywordType cty.Type
+keywordType = cty.CapsuleWithOps("keyword", reflect.TypeOf(""), &cty.CapsuleOps{
+    ExtensionData: func(key interface{}) interface{} {
+        switch key {
+        case customdecode.CustomExpressionDecoder:
+            return customdecode.CustomExpressionDecoderFunc(
+                func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+                    var diags hcl.Diagnostics
+                    kw := hcl.ExprAsKeyword(expr)
+                    if kw == "" {
+                        diags = append(diags, &hcl.Diagnostic{
+                            Severity: hcl.DiagError,
+                            Summary:  "Invalid keyword",
+                            Detail:   "A keyword is required",
+                            Subject:  expr.Range().Ptr(),
+                        })
+                        return cty.UnknownVal(keywordType), diags
+                    }
+                    return cty.CapsuleVal(keywordType, &kw), nil
+                },
+            )
+        default:
+            return nil
+        }
+    },
+})
+```
+
+The boilerplate here is a bit fussy, but the important part for our purposes
+is the `case customdecode.CustomExpressionDecoder:` clause, which uses
+a custom extension key type defined in this package to recognize when a
+component implementing this extension is checking to see if a target type
+has a custom decode implementation.
+
+In the above case we've defined a type that decodes expressions as static
+keywords, so a keyword like `foo` would decode as an encapsulated `"foo"`
+string, while any other sort of expression like `"baz"` or `1 + 1` would
+return an error.
+
+We could then use `keywordType` as a type constraint either for a function
+parameter or a `hcldec` attribute specification, which would require the
+argument for that function parameter or the expression for the matching
+attributes to be a static keyword, rather than an arbitrary expression.
+For example, in a `hcldec.AttrSpec`:
+
+```go
+keywordSpec := &hcldec.AttrSpec{
+    Name: "keyword",
+    Type: keywordType,
+}
+```
+
+The above would accept input like the following and would set its result to
+a `cty.Value` of `keywordType`, after decoding:
+
+```hcl
+keyword = foo
+```
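+
+The same capsule type could also serve as a function parameter type
+constraint, in which case the argument expression in any call is decoded by
+the same custom decoder. The following is a sketch only; `EchoKeywordFunc`
+and its behavior are illustrative, assuming the `keywordType` definition from
+the earlier example:
+
+```go
+var EchoKeywordFunc = function.New(&function.Spec{
+    Params: []function.Parameter{
+        {
+            Name: "keyword",
+            Type: keywordType, // opts this parameter in to custom decoding
+        },
+    },
+    Type: function.StaticReturnType(cty.String),
+    Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+        // The decoder shown earlier encapsulated a *string in the value.
+        kwPtr := args[0].EncapsulatedValue().(*string)
+        return cty.StringVal(*kwPtr), nil
+    },
+})
+```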
+
+## The Expression and Expression Closure `cty` types
+
+Building on the above, this package also includes two capsule types that use
+the above mechanism to allow calling applications to capture expressions
+directly and thus defer analysis to a later step, after initial decoding.
+
+The `customdecode.ExpressionType` type encapsulates an `hcl.Expression` alone,
+for situations like our type constraint expression example above where it's
+the static structure of the expression we want to inspect, and thus any
+variables and functions defined in the evaluation context are irrelevant.
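+
+For example, a `hcldec` specification could capture an expression for later
+static analysis as in the following sketch, where `body` is assumed to be an
+already-parsed `hcl.Body`:
+
+```go
+typeSpec := &hcldec.AttrSpec{
+    Name: "type",
+    Type: customdecode.ExpressionType,
+}
+
+val, diags := hcldec.Decode(body, typeSpec, nil)
+if !diags.HasErrors() {
+    // The decoded value encapsulates the expression itself, ready for
+    // static analysis rather than normal evaluation.
+    expr := customdecode.ExpressionFromVal(val)
+    _ = expr
+}
+```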
+
+The `customdecode.ExpressionClosureType` type encapsulates a
+`*customdecode.ExpressionClosure` value, which binds the given expression to
+the `hcl.EvalContext` it was asked to evaluate against and thus allows the
+receiver of that result to later perform normal evaluation of the expression
+with all the same variables and functions that would've been available to it
+naturally.
+
+Both of these types can be used as type constraints either for `hcldec`
+attribute specifications or for function arguments. Here's an example of
+`ExpressionClosureType` to implement a function that can evaluate
+an expression with some additional variables defined locally, which we'll
+call the `with(...)` function:
+
+```go
+var WithFunc = function.New(&function.Spec{
+    Params: []function.Parameter{
+        {
+            Name: "variables",
+            Type: cty.DynamicPseudoType,
+        },
+        {
+            Name: "expression",
+            Type: customdecode.ExpressionClosureType,
+        },
+    },
+    Type: func(args []cty.Value) (cty.Type, error) {
+        varsVal := args[0]
+        exprVal := args[1]
+        if !varsVal.Type().IsObjectType() {
+            return cty.NilVal, function.NewArgErrorf(0, "must be an object defining local variables")
+        }
+        if !varsVal.IsKnown() {
+            // We can't predict our result type until the variables object
+            // is known.
+            return cty.DynamicPseudoType, nil
+        }
+        vars := varsVal.AsValueMap()
+        closure := customdecode.ExpressionClosureFromVal(exprVal)
+        result, err := evalWithLocals(vars, closure)
+        if err != nil {
+            return cty.NilVal, err
+        }
+        return result.Type(), nil
+    },
+    Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+        varsVal := args[0]
+        exprVal := args[1]
+        vars := varsVal.AsValueMap()
+        closure := customdecode.ExpressionClosureFromVal(exprVal)
+        return evalWithLocals(vars, closure)
+    },
+})
+
+func evalWithLocals(locals map[string]cty.Value, closure *customdecode.ExpressionClosure) (cty.Value, error) {
+    childCtx := closure.EvalContext.NewChild()
+    childCtx.Variables = locals
+    val, diags := closure.Expression.Value(childCtx)
+    if diags.HasErrors() {
+        return cty.NilVal, function.NewArgErrorf(1, "couldn't evaluate expression: %s", diags.Error())
+    }
+    return val, nil
+}
+```
+
+If the above function were placed into an `hcl.EvalContext` as `with`, it
+could be used in a native syntax call to that function as follows:
+
+```hcl
+  foo = with({name = "Cory"}, "${greeting}, ${name}!")
+```
+
+The above assumes a variable in the main context called `greeting`, to which
+the `with` function adds `name` before evaluating the expression given in
+its second argument. This makes that second argument context-sensitive -- it
+would behave differently if the user wrote the same thing somewhere else -- so
+this capability should be used with care to make sure it doesn't cause confusion
+for the end-users of your language.
+
+There are some other examples of this capability to evaluate expressions in
+unusual ways in the `tryfunc` directory that is a sibling of this one.
diff --git a/ext/customdecode/customdecode.go b/ext/customdecode/customdecode.go
new file mode 100644
index 0000000..c9d7a1e
--- /dev/null
+++ b/ext/customdecode/customdecode.go
@@ -0,0 +1,56 @@
+// Package customdecode contains a HCL extension that allows, in certain
+// contexts, expression evaluation to be overridden by custom static analysis.
+//
+// This mechanism is only supported in certain specific contexts where
+// expressions are decoded with a specific target type in mind. For more
+// information, see the documentation on CustomExpressionDecoder.
+package customdecode
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+type customDecoderImpl int
+
+// CustomExpressionDecoder is a value intended to be used as a cty capsule
+// type ExtensionData key for capsule types whose values are to be obtained
+// by static analysis of an expression rather than normal evaluation of that
+// expression.
+//
+// When a cooperating capsule type is asked for ExtensionData with this key,
+// it must return a non-nil CustomExpressionDecoderFunc value.
+//
+// This mechanism is not universally supported; instead, it's handled in a few
+// specific places where expressions are evaluated with the intent of producing
+// a cty.Value of a type given by the calling application.
+//
+// Specifically, this currently works for type constraints given in
+// hcldec.AttrSpec and hcldec.BlockAttrsSpec, and it works for arguments to
+// function calls in the HCL native syntax. HCL extensions implemented outside
+// of the main HCL module may also implement this; consult their own
+// documentation for details.
+const CustomExpressionDecoder = customDecoderImpl(1)
+
+// CustomExpressionDecoderFunc is the type of value that must be returned by
+// a capsule type handling the key CustomExpressionDecoder in its ExtensionData
+// implementation.
+//
+// If no error diagnostics are returned, the result value MUST be of the
+// capsule type that the decoder function was derived from. If the returned
+// error diagnostics prevent producing a value at all, return cty.NilVal.
+type CustomExpressionDecoderFunc func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics)
+
+// CustomExpressionDecoderForType takes any cty type and returns its
+// custom expression decoder implementation if it has one. If it is not a
+// capsule type or it does not implement a custom expression decoder, this
+// function returns nil.
+func CustomExpressionDecoderForType(ty cty.Type) CustomExpressionDecoderFunc {
+	if !ty.IsCapsuleType() {
+		return nil
+	}
+	if fn, ok := ty.CapsuleExtensionData(CustomExpressionDecoder).(CustomExpressionDecoderFunc); ok {
+		return fn
+	}
+	return nil
+}
diff --git a/ext/customdecode/expression_type.go b/ext/customdecode/expression_type.go
new file mode 100644
index 0000000..af7c66c
--- /dev/null
+++ b/ext/customdecode/expression_type.go
@@ -0,0 +1,146 @@
+package customdecode
+
+import (
+	"fmt"
+	"reflect"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+// ExpressionType is a cty capsule type that carries hcl.Expression values.
+//
+// This type implements custom decoding in the most general way possible: it
+// just captures whatever expression is given to it, with no further processing
+// whatsoever. It could therefore be useful in situations where an application
+// must defer processing of the expression content until a later step.
+//
+// ExpressionType only captures the expression, not the evaluation context it
+// was destined to be evaluated in. That means this type can be fine for
+// situations where the recipient of the value only intends to do static
+// analysis, but ExpressionClosureType is more appropriate in situations where
+// the recipient will eventually evaluate the given expression.
+var ExpressionType cty.Type
+
+// ExpressionVal returns a new cty value of type ExpressionType, wrapping the
+// given expression.
+func ExpressionVal(expr hcl.Expression) cty.Value {
+	return cty.CapsuleVal(ExpressionType, &expr)
+}
+
+// ExpressionFromVal returns the expression encapsulated in the given value, or
+// panics if the value is not a known value of ExpressionType.
+func ExpressionFromVal(v cty.Value) hcl.Expression {
+	if !v.Type().Equals(ExpressionType) {
+		panic("value is not of ExpressionType")
+	}
+	ptr := v.EncapsulatedValue().(*hcl.Expression)
+	return *ptr
+}
+
+// ExpressionClosureType is a cty capsule type that carries hcl.Expression
+// values along with their original evaluation contexts.
+//
+// This is similar to ExpressionType except that during custom decoding it
+// also captures the hcl.EvalContext that was provided, allowing callers to
+// evaluate the expression later in the same context where it would originally
+// have been evaluated, or a context derived from that one.
+var ExpressionClosureType cty.Type
+
+// ExpressionClosure is the type encapsulated in ExpressionClosureType
+type ExpressionClosure struct {
+	Expression  hcl.Expression
+	EvalContext *hcl.EvalContext
+}
+
+// ExpressionClosureVal returns a new cty value of type ExpressionClosureType,
+// wrapping the given expression closure.
+func ExpressionClosureVal(closure *ExpressionClosure) cty.Value {
+	return cty.CapsuleVal(ExpressionClosureType, closure)
+}
+
+// Value evaluates the closure's expression using the closure's EvalContext,
+// returning the result.
+func (c *ExpressionClosure) Value() (cty.Value, hcl.Diagnostics) {
+	return c.Expression.Value(c.EvalContext)
+}
+
+// ExpressionClosureFromVal returns the expression closure encapsulated in the
+// given value, or panics if the value is not a known value of
+// ExpressionClosureType.
+//
+// The caller MUST NOT modify the returned closure or the EvalContext inside
+// it. To derive a new EvalContext, either create a child context or make
+// a copy.
+func ExpressionClosureFromVal(v cty.Value) *ExpressionClosure {
+	if !v.Type().Equals(ExpressionClosureType) {
+		panic("value is not of ExpressionClosureType")
+	}
+	return v.EncapsulatedValue().(*ExpressionClosure)
+}
+
+func init() {
+	// Getting hold of a reflect.Type for hcl.Expression is a bit tricky because
+	// it's an interface type, but we can do it with some indirection.
+	goExpressionType := reflect.TypeOf((*hcl.Expression)(nil)).Elem()
+
+	ExpressionType = cty.CapsuleWithOps("expression", goExpressionType, &cty.CapsuleOps{
+		ExtensionData: func(key interface{}) interface{} {
+			switch key {
+			case CustomExpressionDecoder:
+				return CustomExpressionDecoderFunc(
+					func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+						return ExpressionVal(expr), nil
+					},
+				)
+			default:
+				return nil
+			}
+		},
+		TypeGoString: func(_ reflect.Type) string {
+			return "customdecode.ExpressionType"
+		},
+		GoString: func(raw interface{}) string {
+			exprPtr := raw.(*hcl.Expression)
+			return fmt.Sprintf("customdecode.ExpressionVal(%#v)", *exprPtr)
+		},
+		RawEquals: func(a, b interface{}) bool {
+			aPtr := a.(*hcl.Expression)
+			bPtr := b.(*hcl.Expression)
+			return reflect.DeepEqual(*aPtr, *bPtr)
+		},
+	})
+	ExpressionClosureType = cty.CapsuleWithOps("expression closure", reflect.TypeOf(ExpressionClosure{}), &cty.CapsuleOps{
+		ExtensionData: func(key interface{}) interface{} {
+			switch key {
+			case CustomExpressionDecoder:
+				return CustomExpressionDecoderFunc(
+					func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+						return ExpressionClosureVal(&ExpressionClosure{
+							Expression:  expr,
+							EvalContext: ctx,
+						}), nil
+					},
+				)
+			default:
+				return nil
+			}
+		},
+		TypeGoString: func(_ reflect.Type) string {
+			return "customdecode.ExpressionClosureType"
+		},
+		GoString: func(raw interface{}) string {
+			closure := raw.(*ExpressionClosure)
+			return fmt.Sprintf("customdecode.ExpressionClosureVal(%#v)", closure)
+		},
+		RawEquals: func(a, b interface{}) bool {
+			closureA := a.(*ExpressionClosure)
+			closureB := b.(*ExpressionClosure)
+			// The expression itself compares by deep equality, but EvalContexts
+			// conventionally compare by pointer identity, so we'll comply
+			// with both conventions here by testing them separately.
+			return closureA.EvalContext == closureB.EvalContext &&
+				reflect.DeepEqual(closureA.Expression, closureB.Expression)
+		},
+	})
+}
diff --git a/ext/dynblock/README.md b/ext/dynblock/README.md
new file mode 100644
index 0000000..f59ce92
--- /dev/null
+++ b/ext/dynblock/README.md
@@ -0,0 +1,184 @@
+# HCL Dynamic Blocks Extension
+
+This HCL extension implements a special block type named "dynamic" that can
+be used to dynamically generate blocks of other types by iterating over
+collection values.
+
+Normally the block structure in an HCL configuration file is rigid, even
+though dynamic expressions can be used within attribute values. This is
+convenient for most applications since it allows the overall structure of
+the document to be decoded easily, but in some applications it is desirable
+to allow dynamic block generation within certain portions of the configuration.
+
+Dynamic block generation is performed using the `dynamic` block type:
+
+```hcl
+toplevel {
+  nested {
+    foo = "static block 1"
+  }
+
+  dynamic "nested" {
+    for_each = ["a", "b", "c"]
+    iterator = nested
+    content {
+      foo = "dynamic block ${nested.value}"
+    }
+  }
+
+  nested {
+    foo = "static block 2"
+  }
+}
+```
+
+The above is interpreted as if it were written as follows:
+
+```hcl
+toplevel {
+  nested {
+    foo = "static block 1"
+  }
+
+  nested {
+    foo = "dynamic block a"
+  }
+
+  nested {
+    foo = "dynamic block b"
+  }
+
+  nested {
+    foo = "dynamic block c"
+  }
+
+  nested {
+    foo = "static block 2"
+  }
+}
+```
+
+Since HCL block syntax is not normally exposed to the possibility of unknown
+values, this extension must make some compromises when asked to iterate over
+an unknown collection. If the length of the collection cannot be statically
+recognized (because it is an unknown value of list, map, or set type) then
+the `dynamic` construct will generate a _single_ dynamic block whose iterator
+key and value are both unknown values of the dynamic pseudo-type, thus causing
+any attribute values derived from iteration to appear as unknown values. There
+is no explicit representation of the fact that the length of the collection may
+eventually be different than one.
+
+## Usage
+
+Pass a body to function `Expand` to obtain a new body that will, on access
+to its content, evaluate and expand any nested `dynamic` blocks.
+Dynamic block processing is also automatically propagated into any nested
+blocks that are returned, allowing users to nest dynamic blocks inside
+one another and to nest dynamic blocks inside other static blocks.
+
+HCL structural decoding does not normally have access to an `EvalContext`, so
+any variables and functions that should be available to the `for_each`
+and `labels` expressions must be passed in when calling `Expand`. Expressions
+within the `content` block are evaluated separately and so can be passed a
+separate `EvalContext` if desired, during normal attribute expression
+evaluation.
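+
+As a minimal sketch (the helper function and its arguments here are
+illustrative, not part of this package), expansion and decoding might be
+wired up like this:
+
+```go
+func expandAndDecode(body hcl.Body, forEachCtx *hcl.EvalContext, schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
+	// forEachCtx provides the variables and functions available to the
+	// for_each and labels expressions of any nested "dynamic" blocks.
+	expanded := dynblock.Expand(body, forEachCtx)
+
+	// The expanded body behaves like any other hcl.Body; "dynamic" blocks
+	// are expanded lazily as its content is requested.
+	return expanded.Content(schema)
+}
+```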
+
+## Detecting Variables
+
+Some applications dynamically generate an `EvalContext` by analyzing which
+variables are referenced by an expression before evaluating it.
+
+This unfortunately requires some extra effort when this analysis is required
+for the context passed to `Expand`: the HCL API requires a schema to be
+provided in order to do any analysis of the blocks in a body, but the low-level
+schema model provides a description of only one level of nested blocks at
+a time, and thus a new schema must be provided for each additional level of
+nesting.
+
+To make this arduous process as convenient as possible, this package provides
+a helper function `WalkForEachVariables`, which returns a `WalkVariablesNode`
+instance that can be used to find variables directly in a given body and also
+determine which nested blocks require recursive calls. Using this mechanism
+requires that the caller be able to look up a schema given a nested block type.
+For _simple_ formats where a specific block type name always has the same schema
+regardless of context, a walk can be implemented as follows:
+
+```go
+func walkVariables(node dynblock.WalkVariablesNode, schema *hcl.BodySchema) []hcl.Traversal {
+	vars, children := node.Visit(schema)
+
+	for _, child := range children {
+		var childSchema *hcl.BodySchema
+		switch child.BlockTypeName {
+		case "a":
+			childSchema = &hcl.BodySchema{
+				Blocks: []hcl.BlockHeaderSchema{
+					{
+						Type:       "b",
+						LabelNames: []string{"key"},
+					},
+				},
+			}
+		case "b":
+			childSchema = &hcl.BodySchema{
+				Attributes: []hcl.AttributeSchema{
+					{
+						Name:     "val",
+						Required: true,
+					},
+				},
+			}
+		default:
+			// Should never happen, because the above cases should be exhaustive
+			// for the application's configuration format.
+			panic(fmt.Errorf("can't find schema for unknown block type %q", child.BlockTypeName))
+		}
+
+		vars = append(vars, walkVariables(child.Node, childSchema)...)
+	}
+
+	return vars
+}
+```
+
+### Detecting Variables with `hcldec` Specifications
+
+For applications that use the higher-level `hcldec` package to decode nested
+configuration structures into `cty` values, the same specification can be used
+to automatically drive the recursive variable-detection walk described above.
+
+The helper function `ForEachVariablesHCLDec` allows an entire recursive
+configuration structure to be analyzed in a single call given a `hcldec.Spec`
+that describes the nested block structure. This means a `hcldec`-based
+application can support dynamic blocks with only a little additional effort:
+
+```go
+func decodeBody(body hcl.Body, spec hcldec.Spec) (cty.Value, hcl.Diagnostics) {
+	// Determine which variables are needed to expand dynamic blocks
+	neededForDynamic := dynblock.ForEachVariablesHCLDec(body, spec)
+
+	// Build a suitable EvalContext and expand dynamic blocks
+	dynCtx := buildEvalContext(neededForDynamic)
+	dynBody := dynblock.Expand(body, dynCtx)
+
+	// Determine which variables are needed to fully decode the expanded body
+	// This will analyze expressions that came both from static blocks in the
+	// original body and from blocks that were dynamically added by Expand.
+	neededForDecode := hcldec.Variables(dynBody, spec)
+
+	// Build a suitable EvalContext and then fully decode the body as per the
+	// hcldec specification.
+	decCtx := buildEvalContext(neededForDecode)
+	return hcldec.Decode(dynBody, spec, decCtx)
+}
+
+func buildEvalContext(needed []hcl.Traversal) *hcl.EvalContext {
+	// (to be implemented by your application)
+}
+```
+
+## Performance
+
+This extension goes quite harshly against the grain of the HCL API, and
+so it uses lots of wrapping objects and temporary data structures to get its
+work done. HCL in general is not suitable for use in high-performance situations
+or situations sensitive to memory pressure, but that is _especially_ true for
+this extension.
diff --git a/ext/dynblock/expand_body.go b/ext/dynblock/expand_body.go
new file mode 100644
index 0000000..0b68a7a
--- /dev/null
+++ b/ext/dynblock/expand_body.go
@@ -0,0 +1,248 @@
+package dynblock
+
+import (
+	"fmt"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+// expandBody wraps another hcl.Body and expands any "dynamic" blocks found
+// inside whenever Content or PartialContent is called.
+type expandBody struct {
+	original   hcl.Body
+	forEachCtx *hcl.EvalContext
+	iteration  *iteration // non-nil if we're nested inside another "dynamic" block
+
+	// These are used with PartialContent to produce a "remaining items"
+	// body to return. They are nil on all bodies fresh out of the transformer.
+	//
+	// Note that this is re-implemented here rather than delegating to the
+	// existing support required by the underlying body because we need to
+	// retain access to the entire original body on subsequent decode operations
+	// so we can retain any "dynamic" blocks for types we didn't consume
+	// on the first pass.
+	hiddenAttrs  map[string]struct{}
+	hiddenBlocks map[string]hcl.BlockHeaderSchema
+}
+
+func (b *expandBody) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
+	extSchema := b.extendSchema(schema)
+	rawContent, diags := b.original.Content(extSchema)
+
+	blocks, blockDiags := b.expandBlocks(schema, rawContent.Blocks, false)
+	diags = append(diags, blockDiags...)
+	attrs := b.prepareAttributes(rawContent.Attributes)
+
+	content := &hcl.BodyContent{
+		Attributes:       attrs,
+		Blocks:           blocks,
+		MissingItemRange: b.original.MissingItemRange(),
+	}
+
+	return content, diags
+}
+
+func (b *expandBody) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
+	extSchema := b.extendSchema(schema)
+	rawContent, _, diags := b.original.PartialContent(extSchema)
+	// We discard the "remain" argument above because we're going to construct
+	// our own remain that also takes into account remaining "dynamic" blocks.
+
+	blocks, blockDiags := b.expandBlocks(schema, rawContent.Blocks, true)
+	diags = append(diags, blockDiags...)
+	attrs := b.prepareAttributes(rawContent.Attributes)
+
+	content := &hcl.BodyContent{
+		Attributes:       attrs,
+		Blocks:           blocks,
+		MissingItemRange: b.original.MissingItemRange(),
+	}
+
+	remain := &expandBody{
+		original:     b.original,
+		forEachCtx:   b.forEachCtx,
+		iteration:    b.iteration,
+		hiddenAttrs:  make(map[string]struct{}),
+		hiddenBlocks: make(map[string]hcl.BlockHeaderSchema),
+	}
+	for name := range b.hiddenAttrs {
+		remain.hiddenAttrs[name] = struct{}{}
+	}
+	for typeName, blockS := range b.hiddenBlocks {
+		remain.hiddenBlocks[typeName] = blockS
+	}
+	for _, attrS := range schema.Attributes {
+		remain.hiddenAttrs[attrS.Name] = struct{}{}
+	}
+	for _, blockS := range schema.Blocks {
+		remain.hiddenBlocks[blockS.Type] = blockS
+	}
+
+	return content, remain, diags
+}
+
+func (b *expandBody) extendSchema(schema *hcl.BodySchema) *hcl.BodySchema {
+	// We augment the requested schema to also include our special "dynamic"
+	// block type, since then we'll get instances of it interleaved with
+	// all of the literal child blocks we must also include.
+	extSchema := &hcl.BodySchema{
+		Attributes: schema.Attributes,
+		Blocks:     make([]hcl.BlockHeaderSchema, len(schema.Blocks), len(schema.Blocks)+len(b.hiddenBlocks)+1),
+	}
+	copy(extSchema.Blocks, schema.Blocks)
+	extSchema.Blocks = append(extSchema.Blocks, dynamicBlockHeaderSchema)
+
+	// If we have any hiddenBlocks then we also need to register those here
+	// so that a call to "Content" on the underlying body won't fail.
+	// (We'll filter these out again once we process the result of either
+	// Content or PartialContent.)
+	for _, blockS := range b.hiddenBlocks {
+		extSchema.Blocks = append(extSchema.Blocks, blockS)
+	}
+
+	// If we have any hiddenAttrs then we also need to register these, for
+	// the same reason as we deal with hiddenBlocks above.
+	if len(b.hiddenAttrs) != 0 {
+		newAttrs := make([]hcl.AttributeSchema, len(schema.Attributes), len(schema.Attributes)+len(b.hiddenAttrs))
+		copy(newAttrs, extSchema.Attributes)
+		for name := range b.hiddenAttrs {
+			newAttrs = append(newAttrs, hcl.AttributeSchema{
+				Name:     name,
+				Required: false,
+			})
+		}
+		extSchema.Attributes = newAttrs
+	}
+
+	return extSchema
+}
+
+func (b *expandBody) prepareAttributes(rawAttrs hcl.Attributes) hcl.Attributes {
+	if len(b.hiddenAttrs) == 0 && b.iteration == nil {
+		// Easy path: just pass through the attrs from the original body verbatim
+		return rawAttrs
+	}
+
+	// Otherwise we have some work to do: we must filter out any attributes
+	// that are hidden (since a previous PartialContent call already saw these)
+	// and wrap the expressions of the inner attributes so that they will
+	// have access to our iteration variables.
+	attrs := make(hcl.Attributes, len(rawAttrs))
+	for name, rawAttr := range rawAttrs {
+		if _, hidden := b.hiddenAttrs[name]; hidden {
+			continue
+		}
+		if b.iteration != nil {
+			attr := *rawAttr // shallow copy so we can mutate it
+			attr.Expr = exprWrap{
+				Expression: attr.Expr,
+				i:          b.iteration,
+			}
+			attrs[name] = &attr
+		} else {
+			// If we have no active iteration then no wrapping is required.
+			attrs[name] = rawAttr
+		}
+	}
+	return attrs
+}
+
+func (b *expandBody) expandBlocks(schema *hcl.BodySchema, rawBlocks hcl.Blocks, partial bool) (hcl.Blocks, hcl.Diagnostics) {
+	var blocks hcl.Blocks
+	var diags hcl.Diagnostics
+
+	for _, rawBlock := range rawBlocks {
+		switch rawBlock.Type {
+		case "dynamic":
+			realBlockType := rawBlock.Labels[0]
+			if _, hidden := b.hiddenBlocks[realBlockType]; hidden {
+				continue
+			}
+
+			var blockS *hcl.BlockHeaderSchema
+			for _, candidate := range schema.Blocks {
+				if candidate.Type == realBlockType {
+					blockS = &candidate
+					break
+				}
+			}
+			if blockS == nil {
+				// Not a block type that the caller requested.
+				if !partial {
+					diags = append(diags, &hcl.Diagnostic{
+						Severity: hcl.DiagError,
+						Summary:  "Unsupported block type",
+						Detail:   fmt.Sprintf("Blocks of type %q are not expected here.", realBlockType),
+						Subject:  &rawBlock.LabelRanges[0],
+					})
+				}
+				continue
+			}
+
+			spec, specDiags := b.decodeSpec(blockS, rawBlock)
+			diags = append(diags, specDiags...)
+			if specDiags.HasErrors() {
+				continue
+			}
+
+			if spec.forEachVal.IsKnown() {
+				for it := spec.forEachVal.ElementIterator(); it.Next(); {
+					key, value := it.Element()
+					i := b.iteration.MakeChild(spec.iteratorName, key, value)
+
+					block, blockDiags := spec.newBlock(i, b.forEachCtx)
+					diags = append(diags, blockDiags...)
+					if block != nil {
+						// Attach our new iteration context so that attributes
+						// and other nested blocks can refer to our iterator.
+						block.Body = b.expandChild(block.Body, i)
+						blocks = append(blocks, block)
+					}
+				}
+			} else {
+				// If our top-level iteration value isn't known then we
+				// substitute an unknownBody, which will cause the entire block
+				// to evaluate to an unknown value.
+				i := b.iteration.MakeChild(spec.iteratorName, cty.DynamicVal, cty.DynamicVal)
+				block, blockDiags := spec.newBlock(i, b.forEachCtx)
+				diags = append(diags, blockDiags...)
+				if block != nil {
+					block.Body = unknownBody{b.expandChild(block.Body, i)}
+					blocks = append(blocks, block)
+				}
+			}
+
+		default:
+			if _, hidden := b.hiddenBlocks[rawBlock.Type]; !hidden {
+				// A static block doesn't create a new iteration context, but
+				// it does need to inherit _our own_ iteration context in
+				// case it contains expressions that refer to our inherited
+				// iterators, or nested "dynamic" blocks.
+				expandedBlock := *rawBlock // shallow copy
+				expandedBlock.Body = b.expandChild(rawBlock.Body, b.iteration)
+				blocks = append(blocks, &expandedBlock)
+			}
+		}
+	}
+
+	return blocks, diags
+}
+
+func (b *expandBody) expandChild(child hcl.Body, i *iteration) hcl.Body {
+	chiCtx := i.EvalContext(b.forEachCtx)
+	ret := Expand(child, chiCtx)
+	ret.(*expandBody).iteration = i
+	return ret
+}
+
+func (b *expandBody) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
+	// blocks aren't allowed in JustAttributes mode and this body can
+	// only produce blocks, so we'll just pass straight through to our
+	// underlying body here.
+	return b.original.JustAttributes()
+}
+
+func (b *expandBody) MissingItemRange() hcl.Range {
+	return b.original.MissingItemRange()
+}
diff --git a/ext/dynblock/expand_body_test.go b/ext/dynblock/expand_body_test.go
new file mode 100644
index 0000000..a654452
--- /dev/null
+++ b/ext/dynblock/expand_body_test.go
@@ -0,0 +1,623 @@
+package dynblock
+
+import (
+	"strings"
+	"testing"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hcldec"
+	"github.com/hashicorp/hcl/v2/hcltest"
+	"github.com/zclconf/go-cty/cty"
+)
+
+func TestExpand(t *testing.T) {
+	srcBody := hcltest.MockBody(&hcl.BodyContent{
+		Blocks: hcl.Blocks{
+			{
+				Type:        "a",
+				Labels:      []string{"static0"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"val": hcltest.MockExprLiteral(cty.StringVal("static a 0")),
+					}),
+				}),
+			},
+			{
+				Type: "b",
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Blocks: hcl.Blocks{
+						{
+							Type: "c",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+									"val0": hcltest.MockExprLiteral(cty.StringVal("static c 0")),
+								}),
+							}),
+						},
+						{
+							Type:        "dynamic",
+							Labels:      []string{"c"},
+							LabelRanges: []hcl.Range{hcl.Range{}},
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+									"for_each": hcltest.MockExprLiteral(cty.ListVal([]cty.Value{
+										cty.StringVal("dynamic c 0"),
+										cty.StringVal("dynamic c 1"),
+									})),
+									"iterator": hcltest.MockExprVariable("dyn_c"),
+								}),
+								Blocks: hcl.Blocks{
+									{
+										Type: "content",
+										Body: hcltest.MockBody(&hcl.BodyContent{
+											Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+												"val0": hcltest.MockExprTraversalSrc("dyn_c.value"),
+											}),
+										}),
+									},
+								},
+							}),
+						},
+					},
+				}),
+			},
+			{
+				Type:        "dynamic",
+				Labels:      []string{"a"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"for_each": hcltest.MockExprLiteral(cty.ListVal([]cty.Value{
+							cty.StringVal("dynamic a 0"),
+							cty.StringVal("dynamic a 1"),
+							cty.StringVal("dynamic a 2"),
+						})),
+						"labels": hcltest.MockExprList([]hcl.Expression{
+							hcltest.MockExprTraversalSrc("a.key"),
+						}),
+					}),
+					Blocks: hcl.Blocks{
+						{
+							Type: "content",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+									"val": hcltest.MockExprTraversalSrc("a.value"),
+								}),
+							}),
+						},
+					},
+				}),
+			},
+			{
+				Type:        "dynamic",
+				Labels:      []string{"b"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"for_each": hcltest.MockExprLiteral(cty.ListVal([]cty.Value{
+							cty.StringVal("dynamic b 0"),
+							cty.StringVal("dynamic b 1"),
+						})),
+						"iterator": hcltest.MockExprVariable("dyn_b"),
+					}),
+					Blocks: hcl.Blocks{
+						{
+							Type: "content",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Blocks: hcl.Blocks{
+									{
+										Type: "c",
+										Body: hcltest.MockBody(&hcl.BodyContent{
+											Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+												"val0": hcltest.MockExprLiteral(cty.StringVal("static c 1")),
+												"val1": hcltest.MockExprTraversalSrc("dyn_b.value"),
+											}),
+										}),
+									},
+									{
+										Type:        "dynamic",
+										Labels:      []string{"c"},
+										LabelRanges: []hcl.Range{hcl.Range{}},
+										Body: hcltest.MockBody(&hcl.BodyContent{
+											Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+												"for_each": hcltest.MockExprLiteral(cty.ListVal([]cty.Value{
+													cty.StringVal("dynamic c 2"),
+													cty.StringVal("dynamic c 3"),
+												})),
+											}),
+											Blocks: hcl.Blocks{
+												{
+													Type: "content",
+													Body: hcltest.MockBody(&hcl.BodyContent{
+														Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+															"val0": hcltest.MockExprTraversalSrc("c.value"),
+															"val1": hcltest.MockExprTraversalSrc("dyn_b.value"),
+														}),
+													}),
+												},
+											},
+										}),
+									},
+								},
+							}),
+						},
+					},
+				}),
+			},
+			{
+				Type:        "dynamic",
+				Labels:      []string{"b"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"for_each": hcltest.MockExprLiteral(cty.MapVal(map[string]cty.Value{
+							"foo": cty.ListVal([]cty.Value{
+								cty.StringVal("dynamic c nested 0"),
+								cty.StringVal("dynamic c nested 1"),
+							}),
+						})),
+						"iterator": hcltest.MockExprVariable("dyn_b"),
+					}),
+					Blocks: hcl.Blocks{
+						{
+							Type: "content",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Blocks: hcl.Blocks{
+									{
+										Type:        "dynamic",
+										Labels:      []string{"c"},
+										LabelRanges: []hcl.Range{hcl.Range{}},
+										Body: hcltest.MockBody(&hcl.BodyContent{
+											Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+												"for_each": hcltest.MockExprTraversalSrc("dyn_b.value"),
+											}),
+											Blocks: hcl.Blocks{
+												{
+													Type: "content",
+													Body: hcltest.MockBody(&hcl.BodyContent{
+														Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+															"val0": hcltest.MockExprTraversalSrc("c.value"),
+															"val1": hcltest.MockExprTraversalSrc("dyn_b.key"),
+														}),
+													}),
+												},
+											},
+										}),
+									},
+								},
+							}),
+						},
+					},
+				}),
+			},
+			{
+				Type:        "a",
+				Labels:      []string{"static1"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"val": hcltest.MockExprLiteral(cty.StringVal("static a 1")),
+					}),
+				}),
+			},
+		},
+	})
+
+	dynBody := Expand(srcBody, nil)
+	var remain hcl.Body
+
+	t.Run("PartialDecode", func(t *testing.T) {
+		decSpec := &hcldec.BlockMapSpec{
+			TypeName:   "a",
+			LabelNames: []string{"key"},
+			Nested: &hcldec.AttrSpec{
+				Name:     "val",
+				Type:     cty.String,
+				Required: true,
+			},
+		}
+
+		var got cty.Value
+		var diags hcl.Diagnostics
+		got, remain, diags = hcldec.PartialDecode(dynBody, decSpec, nil)
+		if len(diags) != 0 {
+			t.Errorf("unexpected diagnostics")
+			for _, diag := range diags {
+				t.Logf("- %s", diag)
+			}
+			return
+		}
+
+		want := cty.MapVal(map[string]cty.Value{
+			"static0": cty.StringVal("static a 0"),
+			"static1": cty.StringVal("static a 1"),
+			"0":       cty.StringVal("dynamic a 0"),
+			"1":       cty.StringVal("dynamic a 1"),
+			"2":       cty.StringVal("dynamic a 2"),
+		})
+
+		if !got.RawEquals(want) {
+			t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, want)
+		}
+	})
+
+	t.Run("Decode", func(t *testing.T) {
+		decSpec := &hcldec.BlockListSpec{
+			TypeName: "b",
+			Nested: &hcldec.BlockListSpec{
+				TypeName: "c",
+				Nested: &hcldec.ObjectSpec{
+					"val0": &hcldec.AttrSpec{
+						Name: "val0",
+						Type: cty.String,
+					},
+					"val1": &hcldec.AttrSpec{
+						Name: "val1",
+						Type: cty.String,
+					},
+				},
+			},
+		}
+
+		var got cty.Value
+		var diags hcl.Diagnostics
+		got, diags = hcldec.Decode(remain, decSpec, nil)
+		if len(diags) != 0 {
+			t.Errorf("unexpected diagnostics")
+			for _, diag := range diags {
+				t.Logf("- %s", diag)
+			}
+			return
+		}
+
+		want := cty.ListVal([]cty.Value{
+			cty.ListVal([]cty.Value{
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("static c 0"),
+					"val1": cty.NullVal(cty.String),
+				}),
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("dynamic c 0"),
+					"val1": cty.NullVal(cty.String),
+				}),
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("dynamic c 1"),
+					"val1": cty.NullVal(cty.String),
+				}),
+			}),
+			cty.ListVal([]cty.Value{
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("static c 1"),
+					"val1": cty.StringVal("dynamic b 0"),
+				}),
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("dynamic c 2"),
+					"val1": cty.StringVal("dynamic b 0"),
+				}),
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("dynamic c 3"),
+					"val1": cty.StringVal("dynamic b 0"),
+				}),
+			}),
+			cty.ListVal([]cty.Value{
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("static c 1"),
+					"val1": cty.StringVal("dynamic b 1"),
+				}),
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("dynamic c 2"),
+					"val1": cty.StringVal("dynamic b 1"),
+				}),
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("dynamic c 3"),
+					"val1": cty.StringVal("dynamic b 1"),
+				}),
+			}),
+			cty.ListVal([]cty.Value{
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("dynamic c nested 0"),
+					"val1": cty.StringVal("foo"),
+				}),
+				cty.ObjectVal(map[string]cty.Value{
+					"val0": cty.StringVal("dynamic c nested 1"),
+					"val1": cty.StringVal("foo"),
+				}),
+			}),
+		})
+
+		if !got.RawEquals(want) {
+			t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, want)
+		}
+	})
+
+}
+
+func TestExpandUnknownBodies(t *testing.T) {
+	srcContent := &hcl.BodyContent{
+		Blocks: hcl.Blocks{
+			{
+				Type:        "dynamic",
+				Labels:      []string{"list"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))),
+					}),
+					Blocks: hcl.Blocks{
+						{
+							Type: "content",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+									"val": hcltest.MockExprTraversalSrc("each.value"),
+								}),
+							}),
+						},
+					},
+				}),
+			},
+			{
+				Type:        "dynamic",
+				Labels:      []string{"tuple"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))),
+					}),
+					Blocks: hcl.Blocks{
+						{
+							Type: "content",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+									"val": hcltest.MockExprTraversalSrc("each.value"),
+								}),
+							}),
+						},
+					},
+				}),
+			},
+			{
+				Type:        "dynamic",
+				Labels:      []string{"set"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))),
+					}),
+					Blocks: hcl.Blocks{
+						{
+							Type: "content",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+									"val": hcltest.MockExprTraversalSrc("each.value"),
+								}),
+							}),
+						},
+					},
+				}),
+			},
+			{
+				Type:        "dynamic",
+				Labels:      []string{"map"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))),
+						"labels": hcltest.MockExprList([]hcl.Expression{
+							hcltest.MockExprLiteral(cty.StringVal("static")),
+						}),
+					}),
+					Blocks: hcl.Blocks{
+						{
+							Type: "content",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+									"val": hcltest.MockExprTraversalSrc("each.value"),
+								}),
+							}),
+						},
+					},
+				}),
+			},
+			{
+				Type:        "dynamic",
+				Labels:      []string{"object"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))),
+						"labels": hcltest.MockExprList([]hcl.Expression{
+							hcltest.MockExprLiteral(cty.StringVal("static")),
+						}),
+					}),
+					Blocks: hcl.Blocks{
+						{
+							Type: "content",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+									"val": hcltest.MockExprTraversalSrc("each.value"),
+								}),
+							}),
+						},
+					},
+				}),
+			},
+			{
+				Type:        "dynamic",
+				Labels:      []string{"invalid_list"},
+				LabelRanges: []hcl.Range{hcl.Range{}},
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+						"for_each": hcltest.MockExprLiteral(cty.UnknownVal(cty.Map(cty.String))),
+					}),
+					Blocks: hcl.Blocks{
+						{
+							Type: "content",
+							Body: hcltest.MockBody(&hcl.BodyContent{
+								Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+									"val": hcltest.MockExprTraversalSrc("each.value"),
+									// unexpected attributes should still produce an error
+									"invalid": hcltest.MockExprLiteral(cty.StringVal("static")),
+								}),
+							}),
+						},
+					},
+				}),
+			},
+		},
+	}
+
+	srcBody := hcltest.MockBody(srcContent)
+	dynBody := Expand(srcBody, nil)
+
+	t.Run("DecodeList", func(t *testing.T) {
+		decSpec := &hcldec.BlockListSpec{
+			TypeName: "list",
+			Nested: &hcldec.ObjectSpec{
+				"val": &hcldec.AttrSpec{
+					Name: "val",
+					Type: cty.String,
+				},
+			},
+		}
+
+		var got cty.Value
+		var diags hcl.Diagnostics
+
+		got, _, diags = hcldec.PartialDecode(dynBody, decSpec, nil)
+		if len(diags) != 0 {
+			t.Errorf("unexpected diagnostics")
+			for _, diag := range diags {
+				t.Logf("- %s", diag)
+			}
+			return
+		}
+
+		want := cty.UnknownVal(cty.List(cty.Object(map[string]cty.Type{
+			"val": cty.String,
+		})))
+
+		if !got.RawEquals(want) {
+			t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, want)
+		}
+	})
+
+	t.Run("DecodeTuple", func(t *testing.T) {
+		decSpec := &hcldec.BlockTupleSpec{
+			TypeName: "tuple",
+			Nested: &hcldec.ObjectSpec{
+				"val": &hcldec.AttrSpec{
+					Name: "val",
+					Type: cty.String,
+				},
+			},
+		}
+
+		var got cty.Value
+		var diags hcl.Diagnostics
+
+		got, _, diags = hcldec.PartialDecode(dynBody, decSpec, nil)
+		if len(diags) != 0 {
+			t.Errorf("unexpected diagnostics")
+			for _, diag := range diags {
+				t.Logf("- %s", diag)
+			}
+			return
+		}
+
+		want := cty.DynamicVal
+
+		if !got.RawEquals(want) {
+			t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, want)
+		}
+	})
+
+	t.Run("DecodeSet", func(t *testing.T) {
+		decSpec := &hcldec.BlockSetSpec{
+			TypeName: "tuple",
+			Nested: &hcldec.ObjectSpec{
+				"val": &hcldec.AttrSpec{
+					Name: "val",
+					Type: cty.String,
+				},
+			},
+		}
+
+		var got cty.Value
+		var diags hcl.Diagnostics
+
+		got, _, diags = hcldec.PartialDecode(dynBody, decSpec, nil)
+		if len(diags) != 0 {
+			t.Errorf("unexpected diagnostics")
+			for _, diag := range diags {
+				t.Logf("- %s", diag)
+			}
+			return
+		}
+
+		want := cty.UnknownVal(cty.Set(cty.Object(map[string]cty.Type{
+			"val": cty.String,
+		})))
+
+		if !got.RawEquals(want) {
+			t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, want)
+		}
+	})
+
+	t.Run("DecodeMap", func(t *testing.T) {
+		decSpec := &hcldec.BlockMapSpec{
+			TypeName:   "map",
+			LabelNames: []string{"key"},
+			Nested: &hcldec.ObjectSpec{
+				"val": &hcldec.AttrSpec{
+					Name: "val",
+					Type: cty.String,
+				},
+			},
+		}
+
+		var got cty.Value
+		var diags hcl.Diagnostics
+
+		got, _, diags = hcldec.PartialDecode(dynBody, decSpec, nil)
+		if len(diags) != 0 {
+			t.Errorf("unexpected diagnostics")
+			for _, diag := range diags {
+				t.Logf("- %s", diag)
+			}
+			return
+		}
+
+		want := cty.UnknownVal(cty.Map(cty.Object(map[string]cty.Type{
+			"val": cty.String,
+		})))
+
+		if !got.RawEquals(want) {
+			t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, want)
+		}
+	})
+
+	t.Run("DecodeInvalidList", func(t *testing.T) {
+		decSpec := &hcldec.BlockListSpec{
+			TypeName: "invalid_list",
+			Nested: &hcldec.ObjectSpec{
+				"val": &hcldec.AttrSpec{
+					Name: "val",
+					Type: cty.String,
+				},
+			},
+		}
+
+		_, _, diags := hcldec.PartialDecode(dynBody, decSpec, nil)
+		if len(diags) != 1 {
+			t.Error("expected 1 extraneous argument")
+		}
+
+		want := `Mock body has extraneous argument "invalid"`
+
+		if !strings.Contains(diags.Error(), want) {
+			t.Errorf("unexpected diagnostics: %v", diags)
+		}
+	})
+
+}
diff --git a/ext/dynblock/expand_spec.go b/ext/dynblock/expand_spec.go
new file mode 100644
index 0000000..98a51ea
--- /dev/null
+++ b/ext/dynblock/expand_spec.go
@@ -0,0 +1,215 @@
+package dynblock
+
+import (
+	"fmt"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/convert"
+)
+
+type expandSpec struct {
+	blockType      string
+	blockTypeRange hcl.Range
+	defRange       hcl.Range
+	forEachVal     cty.Value
+	iteratorName   string
+	labelExprs     []hcl.Expression
+	contentBody    hcl.Body
+	inherited      map[string]*iteration
+}
+
+func (b *expandBody) decodeSpec(blockS *hcl.BlockHeaderSchema, rawSpec *hcl.Block) (*expandSpec, hcl.Diagnostics) {
+	var diags hcl.Diagnostics
+
+	var schema *hcl.BodySchema
+	if len(blockS.LabelNames) != 0 {
+		schema = dynamicBlockBodySchemaLabels
+	} else {
+		schema = dynamicBlockBodySchemaNoLabels
+	}
+
+	specContent, specDiags := rawSpec.Body.Content(schema)
+	diags = append(diags, specDiags...)
+	if specDiags.HasErrors() {
+		return nil, diags
+	}
+
+	//// for_each attribute
+
+	eachAttr := specContent.Attributes["for_each"]
+	eachVal, eachDiags := eachAttr.Expr.Value(b.forEachCtx)
+	diags = append(diags, eachDiags...)
+
+	if !eachVal.CanIterateElements() && eachVal.Type() != cty.DynamicPseudoType {
+		// We skip this error for DynamicPseudoType because that means we either
+		// have a null (which is checked immediately below) or an unknown
+		// (which is handled in the expandBody Content methods).
+		diags = append(diags, &hcl.Diagnostic{
+			Severity:    hcl.DiagError,
+			Summary:     "Invalid dynamic for_each value",
+			Detail:      fmt.Sprintf("Cannot use a %s value in for_each. An iterable collection is required.", eachVal.Type().FriendlyName()),
+			Subject:     eachAttr.Expr.Range().Ptr(),
+			Expression:  eachAttr.Expr,
+			EvalContext: b.forEachCtx,
+		})
+		return nil, diags
+	}
+	if eachVal.IsNull() {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity:    hcl.DiagError,
+			Summary:     "Invalid dynamic for_each value",
+			Detail:      "Cannot use a null value in for_each.",
+			Subject:     eachAttr.Expr.Range().Ptr(),
+			Expression:  eachAttr.Expr,
+			EvalContext: b.forEachCtx,
+		})
+		return nil, diags
+	}
+
+	//// iterator attribute
+
+	iteratorName := blockS.Type
+	if iteratorAttr := specContent.Attributes["iterator"]; iteratorAttr != nil {
+		itTraversal, itDiags := hcl.AbsTraversalForExpr(iteratorAttr.Expr)
+		diags = append(diags, itDiags...)
+		if itDiags.HasErrors() {
+			return nil, diags
+		}
+
+		if len(itTraversal) != 1 {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Invalid dynamic iterator name",
+				Detail:   "Dynamic iterator must be a single variable name.",
+				Subject:  itTraversal.SourceRange().Ptr(),
+			})
+			return nil, diags
+		}
+
+		iteratorName = itTraversal.RootName()
+	}
+
+	var labelExprs []hcl.Expression
+	if labelsAttr := specContent.Attributes["labels"]; labelsAttr != nil {
+		var labelDiags hcl.Diagnostics
+		labelExprs, labelDiags = hcl.ExprList(labelsAttr.Expr)
+		diags = append(diags, labelDiags...)
+		if labelDiags.HasErrors() {
+			return nil, diags
+		}
+
+		if len(labelExprs) > len(blockS.LabelNames) {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Extraneous dynamic block label",
+				Detail:   fmt.Sprintf("Blocks of type %q require %d label(s).", blockS.Type, len(blockS.LabelNames)),
+				Subject:  labelExprs[len(blockS.LabelNames)].Range().Ptr(),
+			})
+			return nil, diags
+		} else if len(labelExprs) < len(blockS.LabelNames) {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Insufficient dynamic block labels",
+				Detail:   fmt.Sprintf("Blocks of type %q require %d label(s).", blockS.Type, len(blockS.LabelNames)),
+				Subject:  labelsAttr.Expr.Range().Ptr(),
+			})
+			return nil, diags
+		}
+	}
+
+	// Since our schema requests only blocks of type "content", we can assume
+	// that all entries in specContent.Blocks are content blocks.
+	if len(specContent.Blocks) == 0 {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Missing dynamic content block",
+			Detail:   "A dynamic block must have a nested block of type \"content\" to describe the body of each generated block.",
+			Subject:  &specContent.MissingItemRange,
+		})
+		return nil, diags
+	}
+	if len(specContent.Blocks) > 1 {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Extraneous dynamic content block",
+			Detail:   "Only one nested content block is allowed for each dynamic block.",
+			Subject:  &specContent.Blocks[1].DefRange,
+		})
+		return nil, diags
+	}
+
+	return &expandSpec{
+		blockType:      blockS.Type,
+		blockTypeRange: rawSpec.LabelRanges[0],
+		defRange:       rawSpec.DefRange,
+		forEachVal:     eachVal,
+		iteratorName:   iteratorName,
+		labelExprs:     labelExprs,
+		contentBody:    specContent.Blocks[0].Body,
+	}, diags
+}
+
+func (s *expandSpec) newBlock(i *iteration, ctx *hcl.EvalContext) (*hcl.Block, hcl.Diagnostics) {
+	var diags hcl.Diagnostics
+	var labels []string
+	var labelRanges []hcl.Range
+	lCtx := i.EvalContext(ctx)
+	for _, labelExpr := range s.labelExprs {
+		labelVal, labelDiags := labelExpr.Value(lCtx)
+		diags = append(diags, labelDiags...)
+		if labelDiags.HasErrors() {
+			return nil, diags
+		}
+
+		var convErr error
+		labelVal, convErr = convert.Convert(labelVal, cty.String)
+		if convErr != nil {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity:    hcl.DiagError,
+				Summary:     "Invalid dynamic block label",
+				Detail:      fmt.Sprintf("Cannot use this value as a dynamic block label: %s.", convErr),
+				Subject:     labelExpr.Range().Ptr(),
+				Expression:  labelExpr,
+				EvalContext: lCtx,
+			})
+			return nil, diags
+		}
+		if labelVal.IsNull() {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity:    hcl.DiagError,
+				Summary:     "Invalid dynamic block label",
+				Detail:      "Cannot use a null value as a dynamic block label.",
+				Subject:     labelExpr.Range().Ptr(),
+				Expression:  labelExpr,
+				EvalContext: lCtx,
+			})
+			return nil, diags
+		}
+		if !labelVal.IsKnown() {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity:    hcl.DiagError,
+				Summary:     "Invalid dynamic block label",
+				Detail:      "This value is not yet known. Dynamic block labels must be immediately-known values.",
+				Subject:     labelExpr.Range().Ptr(),
+				Expression:  labelExpr,
+				EvalContext: lCtx,
+			})
+			return nil, diags
+		}
+
+		labels = append(labels, labelVal.AsString())
+		labelRanges = append(labelRanges, labelExpr.Range())
+	}
+
+	block := &hcl.Block{
+		Type:        s.blockType,
+		TypeRange:   s.blockTypeRange,
+		Labels:      labels,
+		LabelRanges: labelRanges,
+		DefRange:    s.defRange,
+		Body:        s.contentBody,
+	}
+
+	return block, diags
+}
diff --git a/ext/dynblock/expr_wrap.go b/ext/dynblock/expr_wrap.go
new file mode 100644
index 0000000..460a1d2
--- /dev/null
+++ b/ext/dynblock/expr_wrap.go
@@ -0,0 +1,42 @@
+package dynblock
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+type exprWrap struct {
+	hcl.Expression
+	i *iteration
+}
+
+func (e exprWrap) Variables() []hcl.Traversal {
+	raw := e.Expression.Variables()
+	ret := make([]hcl.Traversal, 0, len(raw))
+
+	// Filter out traversals that refer to our iterator name or any
+	// iterator we've inherited; we're going to provide those in
+	// our Value wrapper, so the caller doesn't need to know about them.
+	for _, traversal := range raw {
+		rootName := traversal.RootName()
+		if rootName == e.i.IteratorName {
+			continue
+		}
+		if _, inherited := e.i.Inherited[rootName]; inherited {
+			continue
+		}
+		ret = append(ret, traversal)
+	}
+	return ret
+}
+
+func (e exprWrap) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+	extCtx := e.i.EvalContext(ctx)
+	return e.Expression.Value(extCtx)
+}
+
+// UnwrapExpression returns the expression being wrapped by this instance.
+// This allows the original expression to be recovered by hcl.UnwrapExpression.
+func (e exprWrap) UnwrapExpression() hcl.Expression {
+	return e.Expression
+}
diff --git a/ext/dynblock/iteration.go b/ext/dynblock/iteration.go
new file mode 100644
index 0000000..c566388
--- /dev/null
+++ b/ext/dynblock/iteration.go
@@ -0,0 +1,66 @@
+package dynblock
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+type iteration struct {
+	IteratorName string
+	Key          cty.Value
+	Value        cty.Value
+	Inherited    map[string]*iteration
+}
+
+func (s *expandSpec) MakeIteration(key, value cty.Value) *iteration {
+	return &iteration{
+		IteratorName: s.iteratorName,
+		Key:          key,
+		Value:        value,
+		Inherited:    s.inherited,
+	}
+}
+
+func (i *iteration) Object() cty.Value {
+	return cty.ObjectVal(map[string]cty.Value{
+		"key":   i.Key,
+		"value": i.Value,
+	})
+}
+
+func (i *iteration) EvalContext(base *hcl.EvalContext) *hcl.EvalContext {
+	new := base.NewChild()
+
+	if i != nil {
+		new.Variables = map[string]cty.Value{}
+		for name, otherIt := range i.Inherited {
+			new.Variables[name] = otherIt.Object()
+		}
+		new.Variables[i.IteratorName] = i.Object()
+	}
+
+	return new
+}
+
+func (i *iteration) MakeChild(iteratorName string, key, value cty.Value) *iteration {
+	if i == nil {
+		// Create an entirely new root iteration.
+		return &iteration{
+			IteratorName: iteratorName,
+			Key:          key,
+			Value:        value,
+		}
+	}
+
+	inherited := map[string]*iteration{}
+	for name, otherIt := range i.Inherited {
+		inherited[name] = otherIt
+	}
+	inherited[i.IteratorName] = i
+	return &iteration{
+		IteratorName: iteratorName,
+		Key:          key,
+		Value:        value,
+		Inherited:    inherited,
+	}
+}
diff --git a/ext/dynblock/public.go b/ext/dynblock/public.go
new file mode 100644
index 0000000..a5bfd94
--- /dev/null
+++ b/ext/dynblock/public.go
@@ -0,0 +1,47 @@
+// Package dynblock provides an extension to HCL that allows dynamic
+// declaration of nested blocks in certain contexts via a special block type
+// named "dynamic".
+package dynblock
+
+import (
+	"github.com/hashicorp/hcl/v2"
+)
+
+// Expand "dynamic" blocks in the given body, returning a new body that
+// has those blocks expanded.
+//
+// The given EvalContext is used when evaluating "for_each" and "labels"
+// attributes within dynamic blocks, allowing those expressions access to
+// variables and functions beyond the iterator variable created by the
+// iteration.
+//
+// Expand returns no diagnostics because no blocks are actually expanded
+// until a call to Content or PartialContent on the returned body, which
+// will then expand only the blocks selected by the schema.
+//
+// "dynamic" blocks are also expanded automatically within nested blocks
+// in the given body, including within other dynamic blocks, thus allowing
+// multi-dimensional iteration. However, it is not possible to
+// dynamically-generate the "dynamic" blocks themselves except through nesting.
+//
+//     parent {
+//       dynamic "child" {
+//         for_each = child_objs
+//         content {
+//           dynamic "grandchild" {
+//             for_each = child.value.children
+//             labels   = [grandchild.key]
+//             content {
+//               parent_key = child.key
+//               value      = grandchild.value
+//             }
+//           }
+//         }
+//       }
+//     }
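+//
+// As a rough usage sketch (the body, eval context, and schema here are
+// hypothetical values supplied by the calling application):
+//
+//     expanded := dynblock.Expand(body, ctx)
+//     content, diags := expanded.Content(schema)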
+func Expand(body hcl.Body, ctx *hcl.EvalContext) hcl.Body {
+	return &expandBody{
+		original:   body,
+		forEachCtx: ctx,
+	}
+}
diff --git a/ext/dynblock/schema.go b/ext/dynblock/schema.go
new file mode 100644
index 0000000..b3907d6
--- /dev/null
+++ b/ext/dynblock/schema.go
@@ -0,0 +1,50 @@
+package dynblock
+
+import "github.com/hashicorp/hcl/v2"
+
+var dynamicBlockHeaderSchema = hcl.BlockHeaderSchema{
+	Type:       "dynamic",
+	LabelNames: []string{"type"},
+}
+
+var dynamicBlockBodySchemaLabels = &hcl.BodySchema{
+	Attributes: []hcl.AttributeSchema{
+		{
+			Name:     "for_each",
+			Required: true,
+		},
+		{
+			Name:     "iterator",
+			Required: false,
+		},
+		{
+			Name:     "labels",
+			Required: true,
+		},
+	},
+	Blocks: []hcl.BlockHeaderSchema{
+		{
+			Type:       "content",
+			LabelNames: nil,
+		},
+	},
+}
+
+var dynamicBlockBodySchemaNoLabels = &hcl.BodySchema{
+	Attributes: []hcl.AttributeSchema{
+		{
+			Name:     "for_each",
+			Required: true,
+		},
+		{
+			Name:     "iterator",
+			Required: false,
+		},
+	},
+	Blocks: []hcl.BlockHeaderSchema{
+		{
+			Type:       "content",
+			LabelNames: nil,
+		},
+	},
+}
diff --git a/ext/dynblock/unknown_body.go b/ext/dynblock/unknown_body.go
new file mode 100644
index 0000000..caa2085
--- /dev/null
+++ b/ext/dynblock/unknown_body.go
@@ -0,0 +1,89 @@
+package dynblock
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+// unknownBody is a funny body that just reports everything inside it as
+// unknown. It uses a given other body as a sort of template for what attributes
+// and blocks are inside -- including source location information -- but
+// substitutes unknown values of unknown type for all attributes.
+//
+// This rather odd process is used to handle expansion of dynamic blocks whose
+// for_each expression is unknown. Since a block cannot itself be unknown,
+// we instead arrange for everything _inside_ the block to be unknown instead,
+// to give the best possible approximation.
+type unknownBody struct {
+	template hcl.Body
+}
+
+var _ hcl.Body = unknownBody{}
+
+// hcldec.UnknownBody impl
+func (b unknownBody) Unknown() bool {
+	return true
+}
+
+func (b unknownBody) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
+	content, diags := b.template.Content(schema)
+	content = b.fixupContent(content)
+
+	// We're intentionally preserving the diagnostics reported from the
+	// inner body so that we can still report where the template body doesn't
+	// match the requested schema.
+	return content, diags
+}
+
+func (b unknownBody) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
+	content, remain, diags := b.template.PartialContent(schema)
+	content = b.fixupContent(content)
+	remain = unknownBody{remain} // remaining content must also be wrapped
+
+	// We're intentionally preserving the diagnostics reported from the
+	// inner body so that we can still report where the template body doesn't
+	// match the requested schema.
+	return content, remain, diags
+}
+
+func (b unknownBody) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
+	attrs, diags := b.template.JustAttributes()
+	attrs = b.fixupAttrs(attrs)
+
+	// We're intentionally preserving the diagnostics reported from the
+	// inner body so that we can still report where the template body doesn't
+	// match the requested schema.
+	return attrs, diags
+}
+
+func (b unknownBody) MissingItemRange() hcl.Range {
+	return b.template.MissingItemRange()
+}
+
+func (b unknownBody) fixupContent(got *hcl.BodyContent) *hcl.BodyContent {
+	ret := &hcl.BodyContent{}
+	ret.Attributes = b.fixupAttrs(got.Attributes)
+	if len(got.Blocks) > 0 {
+		ret.Blocks = make(hcl.Blocks, 0, len(got.Blocks))
+		for _, gotBlock := range got.Blocks {
+			new := *gotBlock                      // shallow copy
+			new.Body = unknownBody{gotBlock.Body} // nested content must also be marked unknown
+			ret.Blocks = append(ret.Blocks, &new)
+		}
+	}
+
+	return ret
+}
+
+func (b unknownBody) fixupAttrs(got hcl.Attributes) hcl.Attributes {
+	if len(got) == 0 {
+		return nil
+	}
+	ret := make(hcl.Attributes, len(got))
+	for name, gotAttr := range got {
+		new := *gotAttr // shallow copy
+		new.Expr = hcl.StaticExpr(cty.DynamicVal, gotAttr.Expr.Range())
+		ret[name] = &new
+	}
+	return ret
+}
diff --git a/ext/dynblock/variables.go b/ext/dynblock/variables.go
new file mode 100644
index 0000000..1923392
--- /dev/null
+++ b/ext/dynblock/variables.go
@@ -0,0 +1,209 @@
+package dynblock
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+// WalkVariables begins the recursive process of walking all expressions and
+// nested blocks in the given body and its child bodies while taking into
+// account any "dynamic" blocks.
+//
+// This function requires that the caller walk through the nested block
+// structure in the given body level-by-level so that an appropriate schema
+// can be provided at each level to inform further processing. This workflow
+// is thus easiest to use for calling applications that have some higher-level
+// schema representation available with which to drive this multi-step
+// process. If your application uses the hcldec package, you may be able to
+// use VariablesHCLDec instead for a more automatic approach.
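+//
+// As a rough sketch of one level of that walk (rootSchema and
+// schemaForBlockType are hypothetical, application-provided):
+//
+//     node := dynblock.WalkVariables(body)
+//     vars, children := node.Visit(rootSchema)
+//     for _, child := range children {
+//         childVars, _ := child.Node.Visit(schemaForBlockType(child.BlockTypeName))
+//         vars = append(vars, childVars...)
+//         // ...and recurse into the grandchildren in the same way.
+//     }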
+func WalkVariables(body hcl.Body) WalkVariablesNode {
+	return WalkVariablesNode{
+		body:           body,
+		includeContent: true,
+	}
+}
+
+// WalkExpandVariables is like Variables but it includes only the variables
+// required for successful block expansion, ignoring any variables referenced
+// inside block contents. The result is the minimal set of all variables
+// required for a call to Expand, excluding variables that would only be
+// needed to subsequently call Content or PartialContent on the expanded
+// body.
+func WalkExpandVariables(body hcl.Body) WalkVariablesNode {
+	return WalkVariablesNode{
+		body: body,
+	}
+}
+
+type WalkVariablesNode struct {
+	body hcl.Body
+	it   *iteration
+
+	includeContent bool
+}
+
+type WalkVariablesChild struct {
+	BlockTypeName string
+	Node          WalkVariablesNode
+}
+
+// Body returns the HCL Body associated with the child node, in case the caller
+// wants to do some sort of inspection of it in order to decide what schema
+// to pass to Visit.
+//
+// Most implementations should just fetch a fixed schema based on the
+// BlockTypeName field and not access this. Deciding on a schema dynamically
+// based on the body is a strange thing to do and generally necessary only if
+// your caller is already doing other bizarre things with HCL bodies.
+func (c WalkVariablesChild) Body() hcl.Body {
+	return c.Node.body
+}
+
+// Visit returns the variable traversals required for any "dynamic" blocks
+// directly in the body associated with this node, and also returns any child
+// nodes that must be visited in order to continue the walk.
+//
+// Each child node has its associated block type name given in its BlockTypeName
+// field, which the calling application should use to determine the appropriate
+// schema for the content of each child node and pass it to the child node's
+// own Visit method to continue the walk recursively.
+func (n WalkVariablesNode) Visit(schema *hcl.BodySchema) (vars []hcl.Traversal, children []WalkVariablesChild) {
+	extSchema := n.extendSchema(schema)
+	container, _, _ := n.body.PartialContent(extSchema)
+	if container == nil {
+		return vars, children
+	}
+
+	children = make([]WalkVariablesChild, 0, len(container.Blocks))
+
+	if n.includeContent {
+		for _, attr := range container.Attributes {
+			for _, traversal := range attr.Expr.Variables() {
+				var ours, inherited bool
+				if n.it != nil {
+					ours = traversal.RootName() == n.it.IteratorName
+					_, inherited = n.it.Inherited[traversal.RootName()]
+				}
+
+				if !(ours || inherited) {
+					vars = append(vars, traversal)
+				}
+			}
+		}
+	}
+
+	for _, block := range container.Blocks {
+		switch block.Type {
+
+		case "dynamic":
+			blockTypeName := block.Labels[0]
+			inner, _, _ := block.Body.PartialContent(variableDetectionInnerSchema)
+			if inner == nil {
+				continue
+			}
+
+			iteratorName := blockTypeName
+			if attr, exists := inner.Attributes["iterator"]; exists {
+				iterTraversal, _ := hcl.AbsTraversalForExpr(attr.Expr)
+				if len(iterTraversal) == 0 {
+					// Ignore this invalid dynamic block, since it'll produce
+					// an error if someone tries to extract content from it
+					// later anyway.
+					continue
+				}
+				iteratorName = iterTraversal.RootName()
+			}
+			blockIt := n.it.MakeChild(iteratorName, cty.DynamicVal, cty.DynamicVal)
+
+			if attr, exists := inner.Attributes["for_each"]; exists {
+				// Filter out iterator names inherited from parent blocks
+				for _, traversal := range attr.Expr.Variables() {
+					if _, inherited := blockIt.Inherited[traversal.RootName()]; !inherited {
+						vars = append(vars, traversal)
+					}
+				}
+			}
+			if attr, exists := inner.Attributes["labels"]; exists {
+				// Filter out both our own iterator name _and_ those inherited
+				// from parent blocks, since we provide _both_ of these to the
+				// label expressions.
+				for _, traversal := range attr.Expr.Variables() {
+					ours := traversal.RootName() == iteratorName
+					_, inherited := blockIt.Inherited[traversal.RootName()]
+
+					if !(ours || inherited) {
+						vars = append(vars, traversal)
+					}
+				}
+			}
+
+			for _, contentBlock := range inner.Blocks {
+				// We only request "content" blocks in our schema, so we know
+				// any blocks we find here will be content blocks. We require
+				// exactly one content block for actual expansion, but we'll
+				// be more liberal here so that callers can still collect
+				// variables from erroneous "dynamic" blocks.
+				children = append(children, WalkVariablesChild{
+					BlockTypeName: blockTypeName,
+					Node: WalkVariablesNode{
+						body:           contentBlock.Body,
+						it:             blockIt,
+						includeContent: n.includeContent,
+					},
+				})
+			}
+
+		default:
+			children = append(children, WalkVariablesChild{
+				BlockTypeName: block.Type,
+				Node: WalkVariablesNode{
+					body:           block.Body,
+					it:             n.it,
+					includeContent: n.includeContent,
+				},
+			})
+
+		}
+	}
+
+	return vars, children
+}
+
+func (n WalkVariablesNode) extendSchema(schema *hcl.BodySchema) *hcl.BodySchema {
+	// We augment the requested schema to also include our special "dynamic"
+	// block type, since then we'll get instances of it interleaved with
+	// all of the literal child blocks we must also include.
+	extSchema := &hcl.BodySchema{
+		Attributes: schema.Attributes,
+		Blocks:     make([]hcl.BlockHeaderSchema, len(schema.Blocks), len(schema.Blocks)+1),
+	}
+	copy(extSchema.Blocks, schema.Blocks)
+	extSchema.Blocks = append(extSchema.Blocks, dynamicBlockHeaderSchema)
+
+	return extSchema
+}
+
+// This is a more relaxed schema than what's in schema.go, since we
+// want to maximize the number of variables we can find even if there
+// are erroneous blocks.
+var variableDetectionInnerSchema = &hcl.BodySchema{
+	Attributes: []hcl.AttributeSchema{
+		{
+			Name:     "for_each",
+			Required: false,
+		},
+		{
+			Name:     "labels",
+			Required: false,
+		},
+		{
+			Name:     "iterator",
+			Required: false,
+		},
+	},
+	Blocks: []hcl.BlockHeaderSchema{
+		{
+			Type: "content",
+		},
+	},
+}
diff --git a/ext/dynblock/variables_hcldec.go b/ext/dynblock/variables_hcldec.go
new file mode 100644
index 0000000..907ef3e
--- /dev/null
+++ b/ext/dynblock/variables_hcldec.go
@@ -0,0 +1,43 @@
+package dynblock
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hcldec"
+)
+
+// VariablesHCLDec is a wrapper around WalkVariables that uses the given hcldec
+// specification to automatically drive the recursive walk through nested
+// blocks in the given body.
+//
+// This is a drop-in replacement for hcldec.Variables which is able to treat
+// blocks of type "dynamic" in the same special way that dynblock.Expand would,
+// exposing both the variables referenced in the "for_each" and "labels"
+// arguments and variables used in the nested "content" block.
+func VariablesHCLDec(body hcl.Body, spec hcldec.Spec) []hcl.Traversal {
+	rootNode := WalkVariables(body)
+	return walkVariablesWithHCLDec(rootNode, spec)
+}
+
+// ExpandVariablesHCLDec is like VariablesHCLDec but it includes only the
+// minimal set of variables required to call Expand, ignoring variables that
+// are referenced only inside normal block contents. See WalkExpandVariables
+// for more information.
+func ExpandVariablesHCLDec(body hcl.Body, spec hcldec.Spec) []hcl.Traversal {
+	rootNode := WalkExpandVariables(body)
+	return walkVariablesWithHCLDec(rootNode, spec)
+}
+
+func walkVariablesWithHCLDec(node WalkVariablesNode, spec hcldec.Spec) []hcl.Traversal {
+	vars, children := node.Visit(hcldec.ImpliedSchema(spec))
+
+	if len(children) > 0 {
+		childSpecs := hcldec.ChildBlockTypes(spec)
+		for _, child := range children {
+			if childSpec, exists := childSpecs[child.BlockTypeName]; exists {
+				vars = append(vars, walkVariablesWithHCLDec(child.Node, childSpec)...)
+			}
+		}
+	}
+
+	return vars
+}
diff --git a/ext/dynblock/variables_test.go b/ext/dynblock/variables_test.go
new file mode 100644
index 0000000..b3a2d72
--- /dev/null
+++ b/ext/dynblock/variables_test.go
@@ -0,0 +1,155 @@
+package dynblock
+
+import (
+	"reflect"
+	"testing"
+
+	"github.com/hashicorp/hcl/v2/hcldec"
+	"github.com/zclconf/go-cty/cty"
+
+	"github.com/davecgh/go-spew/spew"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hclsyntax"
+)
+
+func TestVariables(t *testing.T) {
+	const src = `
+
+# We have some references to things inside the "val" attribute inside each
+# of our "b" blocks, which should be included in the result of WalkVariables
+# but not WalkExpandVariables.
+
+a {
+  dynamic "b" {
+    for_each = [for i, v in some_list_0: "${i}=${v},${baz}"]
+    labels = ["${b.value} ${something_else_0}"]
+    content {
+      val = "${b.value} ${something_else_1}"
+    }
+  }
+}
+
+dynamic "a" {
+  for_each = some_list_1
+
+  content {
+    b "foo" {
+      val = "${a.value} ${something_else_2}"
+    }
+
+    dynamic "b" {
+      for_each = some_list_2
+      iterator = dyn_b
+      labels = ["${a.value} ${dyn_b.value} ${b} ${something_else_3}"]
+      content {
+        val = "${a.value} ${dyn_b.value} ${something_else_4}"
+      }
+    }
+  }
+}
+
+dynamic "a" {
+  for_each = some_list_3
+  iterator = dyn_a
+
+  content {
+    b "foo" {
+      val = "${dyn_a.value} ${something_else_5}"
+    }
+
+    dynamic "b" {
+      for_each = some_list_4
+      labels = ["${dyn_a.value} ${b.value} ${a} ${something_else_6}"]
+      content {
+        val = "${dyn_a.value} ${b.value} ${something_else_7}"
+      }
+    }
+  }
+}
+`
+
+	f, diags := hclsyntax.ParseConfig([]byte(src), "", hcl.Pos{})
+	if len(diags) != 0 {
+		t.Errorf("unexpected diagnostics during parse")
+		for _, diag := range diags {
+			t.Logf("- %s", diag)
+		}
+		return
+	}
+
+	spec := &hcldec.BlockListSpec{
+		TypeName: "a",
+		Nested: &hcldec.BlockMapSpec{
+			TypeName:   "b",
+			LabelNames: []string{"key"},
+			Nested: &hcldec.AttrSpec{
+				Name: "val",
+				Type: cty.String,
+			},
+		},
+	}
+
+	t.Run("WalkVariables", func(t *testing.T) {
+		traversals := VariablesHCLDec(f.Body, spec)
+		got := make([]string, len(traversals))
+		for i, traversal := range traversals {
+			got[i] = traversal.RootName()
+		}
+
+		// The block structure is traversed one level at a time, so the ordering
+		// here is reflecting first a pass of the root, then the first child
+		// under the root, then the first child under that, etc.
+		want := []string{
+			"some_list_1",
+			"some_list_3",
+			"some_list_0",
+			"baz",
+			"something_else_0",
+			"something_else_1", // Would not be included for WalkExpandVariables because it only appears in content
+			"some_list_2",
+			"b", // This is correct because it is referenced in a context where the iterator is overridden to be dyn_b
+			"something_else_3",
+			"something_else_2", // Would not be included for WalkExpandVariables because it only appears in content
+			"something_else_4", // Would not be included for WalkExpandVariables because it only appears in content
+			"some_list_4",
+			"a", // This is correct because it is referenced in a context where the iterator is overridden to be dyn_a
+			"something_else_6",
+			"something_else_5", // Would not be included for WalkExpandVariables because it only appears in content
+			"something_else_7", // Would not be included for WalkExpandVariables because it only appears in content
+		}
+
+		if !reflect.DeepEqual(got, want) {
+			t.Errorf("wrong result\ngot: %swant: %s", spew.Sdump(got), spew.Sdump(want))
+		}
+	})
+
+	t.Run("WalkExpandVariables", func(t *testing.T) {
+		traversals := ExpandVariablesHCLDec(f.Body, spec)
+		got := make([]string, len(traversals))
+		for i, traversal := range traversals {
+			got[i] = traversal.RootName()
+		}
+
+		// The block structure is traversed one level at a time, so the ordering
+		// here is reflecting first a pass of the root, then the first child
+		// under the root, then the first child under that, etc.
+		want := []string{
+			"some_list_1",
+			"some_list_3",
+			"some_list_0",
+			"baz",
+			"something_else_0",
+			"some_list_2",
+			"b", // This is correct because it is referenced in a context where the iterator is overridden to be dyn_b
+			"something_else_3",
+			"some_list_4",
+			"a", // This is correct because it is referenced in a context where the iterator is overridden to be dyn_a
+			"something_else_6",
+		}
+
+		if !reflect.DeepEqual(got, want) {
+			t.Errorf("wrong result\ngot: %swant: %s", spew.Sdump(got), spew.Sdump(want))
+		}
+	})
+}
diff --git a/ext/transform/doc.go b/ext/transform/doc.go
new file mode 100644
index 0000000..ac46669
--- /dev/null
+++ b/ext/transform/doc.go
@@ -0,0 +1,7 @@
+// Package transform is a helper package for writing extensions that work
+// by applying transforms to bodies.
+//
+// It defines a type for body transformers, and then provides utilities in
+// terms of that type for working with transformers, including recursively
+// applying such transforms as hierarchical block structures are extracted.
+package transform
diff --git a/ext/transform/error.go b/ext/transform/error.go
new file mode 100644
index 0000000..6063e2b
--- /dev/null
+++ b/ext/transform/error.go
@@ -0,0 +1,108 @@
+package transform
+
+import (
+	"github.com/hashicorp/hcl/v2"
+)
+
+// NewErrorBody returns a hcl.Body that returns the given diagnostics whenever
+// any of its content-access methods are called.
+//
+// The given diagnostics must have at least one diagnostic of severity
+// hcl.DiagError, or this function will panic.
+//
+// This can be used to prepare a return value for a Transformer that
+// can't complete due to an error. While the transform itself will succeed,
+// the error will be returned as soon as a caller attempts to extract content
+// from the resulting body.
+func NewErrorBody(diags hcl.Diagnostics) hcl.Body {
+	if !diags.HasErrors() {
+		panic("NewErrorBody called without any error diagnostics")
+	}
+	return diagBody{
+		Diags: diags,
+	}
+}
+
+// BodyWithDiagnostics returns a hcl.Body that wraps another hcl.Body
+// and emits the given diagnostics for any content-extraction method.
+//
+// Unlike the result of NewErrorBody, a body with diagnostics still runs
+// the extraction actions on the underlying body if (and only if) the given
+// diagnostics do not contain errors, but prepends the given diagnostics with
+// any diagnostics produced by the action.
+//
+// If the given diagnostics is empty, the given body is returned verbatim.
+//
+// This function is intended for conveniently reporting errors and/or warnings
+// produced during a transform, ensuring that they will be seen when the
+// caller eventually extracts content from the returned body.
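+//
+// A rough sketch of use from within a transformer, where myTransformer and
+// transformSomehow are hypothetical:
+//
+//     func (t myTransformer) TransformBody(body hcl.Body) hcl.Body {
+//         newBody, diags := transformSomehow(body)
+//         return transform.BodyWithDiagnostics(newBody, diags)
+//     }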
+func BodyWithDiagnostics(body hcl.Body, diags hcl.Diagnostics) hcl.Body {
+	if len(diags) == 0 {
+		// nothing to do!
+		return body
+	}
+
+	return diagBody{
+		Diags:   diags,
+		Wrapped: body,
+	}
+}
+
+type diagBody struct {
+	Diags   hcl.Diagnostics
+	Wrapped hcl.Body
+}
+
+func (b diagBody) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
+	if b.Diags.HasErrors() {
+		return b.emptyContent(), b.Diags
+	}
+
+	content, wrappedDiags := b.Wrapped.Content(schema)
+	diags := make(hcl.Diagnostics, 0, len(b.Diags)+len(wrappedDiags))
+	diags = append(diags, b.Diags...)
+	diags = append(diags, wrappedDiags...)
+	return content, diags
+}
+
+func (b diagBody) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
+	if b.Diags.HasErrors() {
+		return b.emptyContent(), b.Wrapped, b.Diags
+	}
+
+	content, remain, wrappedDiags := b.Wrapped.PartialContent(schema)
+	diags := make(hcl.Diagnostics, 0, len(b.Diags)+len(wrappedDiags))
+	diags = append(diags, b.Diags...)
+	diags = append(diags, wrappedDiags...)
+	return content, remain, diags
+}
+
+func (b diagBody) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
+	if b.Diags.HasErrors() {
+		return nil, b.Diags
+	}
+
+	attributes, wrappedDiags := b.Wrapped.JustAttributes()
+	diags := make(hcl.Diagnostics, 0, len(b.Diags)+len(wrappedDiags))
+	diags = append(diags, b.Diags...)
+	diags = append(diags, wrappedDiags...)
+	return attributes, diags
+}
+
+func (b diagBody) MissingItemRange() hcl.Range {
+	if b.Wrapped != nil {
+		return b.Wrapped.MissingItemRange()
+	}
+
+	// Placeholder. This should never be seen in practice because decoding
+	// a diagBody without a wrapped body should always produce an error.
+	return hcl.Range{
+		Filename: "<empty>",
+	}
+}
+
+func (b diagBody) emptyContent() *hcl.BodyContent {
+	return &hcl.BodyContent{
+		MissingItemRange: b.MissingItemRange(),
+	}
+}
diff --git a/ext/transform/transform.go b/ext/transform/transform.go
new file mode 100644
index 0000000..f0f5ff6
--- /dev/null
+++ b/ext/transform/transform.go
@@ -0,0 +1,83 @@
+package transform
+
+import (
+	"github.com/hashicorp/hcl/v2"
+)
+
+// Shallow is equivalent to calling transformer.TransformBody(body), and
+// is provided only for completeness of the top-level API.
+func Shallow(body hcl.Body, transformer Transformer) hcl.Body {
+	return transformer.TransformBody(body)
+}
+
+// Deep applies the given transform to the given body and then
+// wraps the result such that any descendent blocks that are decoded will
+// also have the transform applied to their bodies.
+//
+// This allows for language extensions that define a particular block type
+// for a particular body and all nested blocks within it.
+//
+// Due to the wrapping behavior, the body resulting from this function
+// will not be of the type returned by the transformer. Callers may call
+// only the methods defined for interface hcl.Body, and may not type-assert
+// to access other methods.
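+//
+// A minimal usage sketch (the transformer and schema are hypothetical,
+// application-defined values):
+//
+//     wrapped := transform.Deep(body, myTransformer)
+//     content, diags := wrapped.Content(schema)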
+func Deep(body hcl.Body, transformer Transformer) hcl.Body {
+	return deepWrapper{
+		Transformed: transformer.TransformBody(body),
+		Transformer: transformer,
+	}
+}
+
+// deepWrapper is a hcl.Body implementation that ensures that a given
+// transformer is applied to another given body when content is extracted,
+// and that it recursively applies to any child blocks that are extracted.
+type deepWrapper struct {
+	Transformed hcl.Body
+	Transformer Transformer
+}
+
+func (w deepWrapper) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
+	content, diags := w.Transformed.Content(schema)
+	content = w.transformContent(content)
+	return content, diags
+}
+
+func (w deepWrapper) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
+	content, remain, diags := w.Transformed.PartialContent(schema)
+	content = w.transformContent(content)
+	return content, remain, diags
+}
+
+func (w deepWrapper) transformContent(content *hcl.BodyContent) *hcl.BodyContent {
+	if len(content.Blocks) == 0 {
+		// Easy path: if there are no blocks then there are no child bodies to wrap
+		return content
+	}
+
+	// Since we're going to change things here, we'll be polite and clone the
+	// structure so that we don't risk impacting any internal state of the
+	// original body.
+	ret := &hcl.BodyContent{
+		Attributes:       content.Attributes,
+		MissingItemRange: content.MissingItemRange,
+		Blocks:           make(hcl.Blocks, len(content.Blocks)),
+	}
+
+	for i, givenBlock := range content.Blocks {
+		// Shallow-copy the block so we can mutate it
+		newBlock := *givenBlock
+		newBlock.Body = Deep(newBlock.Body, w.Transformer)
+		ret.Blocks[i] = &newBlock
+	}
+
+	return ret
+}
+
+func (w deepWrapper) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
+	// Attributes can't have bodies or nested blocks, so this is just a thin wrapper.
+	return w.Transformed.JustAttributes()
+}
+
+func (w deepWrapper) MissingItemRange() hcl.Range {
+	return w.Transformed.MissingItemRange()
+}
diff --git a/ext/transform/transform_test.go b/ext/transform/transform_test.go
new file mode 100644
index 0000000..7b68aca
--- /dev/null
+++ b/ext/transform/transform_test.go
@@ -0,0 +1,102 @@
+package transform
+
+import (
+	"testing"
+
+	"reflect"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hcltest"
+	"github.com/zclconf/go-cty/cty"
+)
+
+// Assert that deepWrapper implements Body
+var deepWrapperIsBody hcl.Body = deepWrapper{}
+
+func TestDeep(t *testing.T) {
+
+	testTransform := TransformerFunc(func(body hcl.Body) hcl.Body {
+		_, remain, diags := body.PartialContent(&hcl.BodySchema{
+			Blocks: []hcl.BlockHeaderSchema{
+				{
+					Type: "remove",
+				},
+			},
+		})
+
+		return BodyWithDiagnostics(remain, diags)
+	})
+
+	src := hcltest.MockBody(&hcl.BodyContent{
+		Attributes: hcltest.MockAttrs(map[string]hcl.Expression{
+			"true": hcltest.MockExprLiteral(cty.True),
+		}),
+		Blocks: []*hcl.Block{
+			{
+				Type: "remove",
+				Body: hcl.EmptyBody(),
+			},
+			{
+				Type: "child",
+				Body: hcltest.MockBody(&hcl.BodyContent{
+					Blocks: []*hcl.Block{
+						{
+							Type: "remove",
+						},
+					},
+				}),
+			},
+		},
+	})
+
+	wrapped := Deep(src, testTransform)
+
+	rootContent, diags := wrapped.Content(&hcl.BodySchema{
+		Attributes: []hcl.AttributeSchema{
+			{
+				Name: "true",
+			},
+		},
+		Blocks: []hcl.BlockHeaderSchema{
+			{
+				Type: "child",
+			},
+		},
+	})
+	if len(diags) != 0 {
+		t.Errorf("unexpected diagnostics for root content")
+		for _, diag := range diags {
+			t.Logf("- %s", diag)
+		}
+	}
+
+	wantAttrs := hcltest.MockAttrs(map[string]hcl.Expression{
+		"true": hcltest.MockExprLiteral(cty.True),
+	})
+	if !reflect.DeepEqual(rootContent.Attributes, wantAttrs) {
+		t.Errorf("wrong root attributes\ngot:  %#v\nwant: %#v", rootContent.Attributes, wantAttrs)
+	}
+
+	if got, want := len(rootContent.Blocks), 1; got != want {
+		t.Fatalf("wrong number of root blocks %d; want %d", got, want)
+	}
+	if got, want := rootContent.Blocks[0].Type, "child"; got != want {
+		t.Errorf("wrong block type %s; want %s", got, want)
+	}
+
+	childBlock := rootContent.Blocks[0]
+	childContent, diags := childBlock.Body.Content(&hcl.BodySchema{})
+	if len(diags) != 0 {
+		t.Errorf("unexpected diagnostics for child content")
+		for _, diag := range diags {
+			t.Logf("- %s", diag)
+		}
+	}
+
+	if len(childContent.Attributes) != 0 {
+		t.Errorf("unexpected attributes in child content; want empty content")
+	}
+	if len(childContent.Blocks) != 0 {
+		t.Errorf("unexpected blocks in child content; want empty content")
+	}
+}
diff --git a/ext/transform/transformer.go b/ext/transform/transformer.go
new file mode 100644
index 0000000..2599cdd
--- /dev/null
+++ b/ext/transform/transformer.go
@@ -0,0 +1,40 @@
+package transform
+
+import (
+	"github.com/hashicorp/hcl/v2"
+)
+
+// A Transformer takes a given body, applies some (possibly no-op)
+// transform to it, and returns the new body.
+//
+// It must _not_ mutate the given body in-place.
+//
+// The transform call cannot fail, but it _can_ return a body that immediately
+// returns diagnostics when its methods are called. NewErrorBody is a utility
+// to help with this.
+type Transformer interface {
+	TransformBody(hcl.Body) hcl.Body
+}
+
+// TransformerFunc is a function type that implements Transformer.
+type TransformerFunc func(hcl.Body) hcl.Body
+
+// TransformBody is an implementation of Transformer.TransformBody.
+func (f TransformerFunc) TransformBody(in hcl.Body) hcl.Body {
+	return f(in)
+}
+
+type chain []Transformer
+
+// Chain takes a slice of transformers and returns a single new
+// Transformer that applies each of the given transformers in sequence.
+func Chain(c []Transformer) Transformer {
+	return chain(c)
+}
+
+func (c chain) TransformBody(body hcl.Body) hcl.Body {
+	for _, t := range c {
+		body = t.TransformBody(body)
+	}
+	return body
+}
diff --git a/ext/tryfunc/README.md b/ext/tryfunc/README.md
new file mode 100644
index 0000000..5d56eec
--- /dev/null
+++ b/ext/tryfunc/README.md
@@ -0,0 +1,44 @@
+# "Try" and "can" functions
+
+This Go package contains two `cty` functions intended for use in an
+`hcl.EvalContext` when evaluating HCL native syntax expressions.
+
+The first function `try` attempts to evaluate each of its argument expressions
+in order until one produces a result without any errors.
+
+```hcl
+try(non_existent_variable, 2) # returns 2
+```
+
+If none of the expressions succeed, the function call fails with all of the
+errors it encountered.
+
+The second function `can` is similar except that it ignores the result of
+the given expression altogether and simply returns `true` if the expression
+produced a successful result or `false` if it produced errors.
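+
+For example, using the same variable as above:
+
+```hcl
+can(non_existent_variable) # returns false
+```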
+
+Both of these are primarily intended for working with deep data structures
+which might not have a dependable shape. For example, we can use `try` to
+attempt to fetch a value from deep inside a data structure but produce a
+default value if any step of the traversal fails:
+
+```hcl
+result = try(foo.deep[0].lots.of["traversals"], null)
+```
+
+The final argument to `try` should generally be some sort of constant value that
+will always evaluate successfully.
+
+## Using these functions
+
+Languages built on HCL can make `try` and `can` available to user code by
+exporting them in the `hcl.EvalContext` used for expression evaluation:
+
+```go
+ctx := &hcl.EvalContext{
+    Functions: map[string]function.Function{
+        "try": tryfunc.TryFunc,
+        "can": tryfunc.CanFunc,
+    },
+}
+```
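+
+A minimal sketch of evaluating an expression against that context, assuming
+the `hcl` and `hclsyntax` packages are imported (the expression and any
+variables it references are hypothetical):
+
+```go
+expr, diags := hclsyntax.ParseExpression(
+    []byte(`try(foo.bar, "fallback")`),
+    "example.hcl",
+    hcl.Pos{Line: 1, Column: 1},
+)
+if diags.HasErrors() {
+    // handle parse errors
+}
+val, moreDiags := expr.Value(ctx) // ctx is the EvalContext shown above
+```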
diff --git a/ext/tryfunc/tryfunc.go b/ext/tryfunc/tryfunc.go
new file mode 100644
index 0000000..2f4862f
--- /dev/null
+++ b/ext/tryfunc/tryfunc.go
@@ -0,0 +1,150 @@
+// Package tryfunc contains some optional functions that can be exposed in
+// HCL-based languages to allow authors to test whether a particular expression
+// can succeed and take dynamic action based on that result.
+//
+// These functions are implemented in terms of the customdecode extension from
+// the sibling directory "customdecode", and so they are only useful when
+// used within an HCL EvalContext. Other systems using cty functions are
+// unlikely to support the HCL-specific "customdecode" extension.
+package tryfunc
+
+import (
+	"errors"
+	"fmt"
+	"strings"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/ext/customdecode"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
+)
+
+// TryFunc is a variadic function that tries to evaluate all of its arguments
+// in sequence until one succeeds, in which case it returns that result, or
+// returns an error if none of them succeed.
+var TryFunc function.Function
+
+// CanFunc tries to evaluate the expression given in its first argument.
+var CanFunc function.Function
+
+func init() {
+	TryFunc = function.New(&function.Spec{
+		VarParam: &function.Parameter{
+			Name: "expressions",
+			Type: customdecode.ExpressionClosureType,
+		},
+		Type: func(args []cty.Value) (cty.Type, error) {
+			v, err := try(args)
+			if err != nil {
+				return cty.NilType, err
+			}
+			return v.Type(), nil
+		},
+		Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+			return try(args)
+		},
+	})
+	CanFunc = function.New(&function.Spec{
+		Params: []function.Parameter{
+			{
+				Name: "expression",
+				Type: customdecode.ExpressionClosureType,
+			},
+		},
+		Type: function.StaticReturnType(cty.Bool),
+		Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+			return can(args[0])
+		},
+	})
+}
+
+func try(args []cty.Value) (cty.Value, error) {
+	if len(args) == 0 {
+		return cty.NilVal, errors.New("at least one argument is required")
+	}
+
+	// We'll collect up all of the diagnostics we encounter along the way
+	// and report them all if none of the expressions succeed, so that the
+	// user might get some hints on how to make at least one succeed.
+	var diags hcl.Diagnostics
+	for _, arg := range args {
+		closure := customdecode.ExpressionClosureFromVal(arg)
+		if dependsOnUnknowns(closure.Expression, closure.EvalContext) {
+			// We can't safely decide if this expression will succeed yet,
+			// and so our entire result must be unknown until we have
+			// more information.
+			return cty.DynamicVal, nil
+		}
+
+		v, moreDiags := closure.Value()
+		diags = append(diags, moreDiags...)
+		if moreDiags.HasErrors() {
+			continue // try the next one, if there is one to try
+		}
+		return v, nil // ignore any accumulated diagnostics if one succeeds
+	}
+
+	// If we fall out here then none of the expressions succeeded, and so
+	// we must have at least one diagnostic and we'll return all of them
+	// so that the user can see the errors related to whichever one they
+	// were expecting to have succeeded in this case.
+	//
+	// Because our function must return a single error value rather than
+	// diagnostics, we'll construct a suitable error message string
+	// that will make sense in the context of the function call failure
+	// diagnostic HCL will eventually wrap this in.
+	var buf strings.Builder
+	buf.WriteString("no expression succeeded:\n")
+	for _, diag := range diags {
+		if diag.Subject != nil {
+			buf.WriteString(fmt.Sprintf("- %s (at %s)\n  %s\n", diag.Summary, diag.Subject, diag.Detail))
+		} else {
+			buf.WriteString(fmt.Sprintf("- %s\n  %s\n", diag.Summary, diag.Detail))
+		}
+	}
+	buf.WriteString("\nAt least one expression must produce a successful result")
+	return cty.NilVal, errors.New(buf.String())
+}
+
+func can(arg cty.Value) (cty.Value, error) {
+	closure := customdecode.ExpressionClosureFromVal(arg)
+	if dependsOnUnknowns(closure.Expression, closure.EvalContext) {
+		// Can't decide yet, then.
+		return cty.UnknownVal(cty.Bool), nil
+	}
+
+	_, diags := closure.Value()
+	if diags.HasErrors() {
+		return cty.False, nil
+	}
+	return cty.True, nil
+}
+
+// dependsOnUnknowns returns true if any of the variables that the given
+// expression might access are unknown values or contain unknown values.
+//
+// This is a conservative result that prefers to return true if there's any
+// chance that the expression might derive from an unknown value during its
+// evaluation; it is likely to produce false-positives for more complex
+// expressions involving deep data structures.
+func dependsOnUnknowns(expr hcl.Expression, ctx *hcl.EvalContext) bool {
+	for _, traversal := range expr.Variables() {
+		val, diags := traversal.TraverseAbs(ctx)
+		if diags.HasErrors() {
+			// If the traversal returned a definitive error then it must
+			// not traverse through any unknowns.
+			continue
+		}
+		if !val.IsWhollyKnown() {
+			// The value will be unknown if either it refers directly to
+			// an unknown value or if the traversal moves through an unknown
+			// collection. We're using IsWhollyKnown, so this also catches
+			// situations where the traversal refers to a compound data
+			// structure that contains any unknown values. That's important,
+			// because during evaluation the expression might evaluate more
+			// deeply into this structure and encounter the unknowns.
+			return true
+		}
+	}
+	return false
+}
diff --git a/ext/tryfunc/tryfunc_test.go b/ext/tryfunc/tryfunc_test.go
new file mode 100644
index 0000000..dc32a6d
--- /dev/null
+++ b/ext/tryfunc/tryfunc_test.go
@@ -0,0 +1,231 @@
+package tryfunc
+
+import (
+	"testing"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hclsyntax"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
+)
+
+func TestTryFunc(t *testing.T) {
+	tests := map[string]struct {
+		expr    string
+		vars    map[string]cty.Value
+		want    cty.Value
+		wantErr string
+	}{
+		"one argument succeeds": {
+			`try(1)`,
+			nil,
+			cty.NumberIntVal(1),
+			``,
+		},
+		"one marked argument succeeds": {
+			`try(sensitive)`,
+			map[string]cty.Value{
+				"sensitive": cty.StringVal("secret").Mark("porpoise"),
+			},
+			cty.StringVal("secret").Mark("porpoise"),
+			``,
+		},
+		"two arguments, first succeeds": {
+			`try(1, 2)`,
+			nil,
+			cty.NumberIntVal(1),
+			``,
+		},
+		"two arguments, first fails": {
+			`try(nope, 2)`,
+			nil,
+			cty.NumberIntVal(2),
+			``,
+		},
+		"two arguments, first depends on unknowns": {
+			`try(unknown, 2)`,
+			map[string]cty.Value{
+				"unknown": cty.UnknownVal(cty.Number),
+			},
+			cty.DynamicVal, // can't proceed until first argument is known
+			``,
+		},
+		"two arguments, first succeeds and second depends on unknowns": {
+			`try(1, unknown)`,
+			map[string]cty.Value{
+				"unknown": cty.UnknownVal(cty.Number),
+			},
+			cty.NumberIntVal(1), // we know 1st succeeds, so it doesn't matter that 2nd is unknown
+			``,
+		},
+		"two arguments, first depends on unknowns deeply": {
+			`try(has_unknowns, 2)`,
+			map[string]cty.Value{
+				"has_unknowns": cty.ListVal([]cty.Value{cty.UnknownVal(cty.Bool)}),
+			},
+			cty.DynamicVal, // can't proceed until first argument is wholly known
+			``,
+		},
+		"two arguments, first traverses through an unknown": {
+			`try(unknown.baz, 2)`,
+			map[string]cty.Value{
+				"unknown": cty.UnknownVal(cty.Map(cty.String)),
+			},
+			cty.DynamicVal, // can't proceed until first argument is wholly known
+			``,
+		},
+		"two arguments, both marked, first succeeds": {
+			`try(sensitive, other)`,
+			map[string]cty.Value{
+				"sensitive": cty.StringVal("secret").Mark("porpoise"),
+				"other":     cty.StringVal("that").Mark("a"),
+			},
+			cty.StringVal("secret").Mark("porpoise"),
+			``,
+		},
+		"two arguments, both marked, second succeeds": {
+			`try(sensitive, other)`,
+			map[string]cty.Value{
+				"other": cty.StringVal("that").Mark("a"),
+			},
+			cty.StringVal("that").Mark("a"),
+			``,
+		},
+		"two arguments, result is element of marked list ": {
+			`try(sensitive[0], other)`,
+			map[string]cty.Value{
+				"sensitive": cty.ListVal([]cty.Value{
+					cty.StringVal("list"),
+					cty.StringVal("of "),
+					cty.StringVal("secrets"),
+				}).Mark("secret"),
+				"other": cty.StringVal("not"),
+			},
+			cty.StringVal("list").Mark("secret"),
+			``,
+		},
+		"three arguments, all fail": {
+			`try(this, that, this_thing_in_particular)`,
+			nil,
+			cty.NumberIntVal(2),
+			// The grammar of this stringification of the message is unfortunate,
+			// but caller can type-assert our result to get the original
+			// diagnostics directly in order to produce a better result.
+			`test.hcl:1,1-5: Error in function call; Call to function "try" failed: no expression succeeded:
+- Variables not allowed (at test.hcl:1,5-9)
+  Variables may not be used here.
+- Variables not allowed (at test.hcl:1,11-15)
+  Variables may not be used here.
+- Variables not allowed (at test.hcl:1,17-41)
+  Variables may not be used here.
+
+At least one expression must produce a successful result.`,
+		},
+		"no arguments": {
+			`try()`,
+			nil,
+			cty.NilVal,
+			`test.hcl:1,1-5: Error in function call; Call to function "try" failed: at least one argument is required.`,
+		},
+	}
+
+	for k, test := range tests {
+		t.Run(k, func(t *testing.T) {
+			expr, diags := hclsyntax.ParseExpression([]byte(test.expr), "test.hcl", hcl.Pos{Line: 1, Column: 1})
+			if diags.HasErrors() {
+				t.Fatalf("unexpected problems: %s", diags.Error())
+			}
+
+			ctx := &hcl.EvalContext{
+				Variables: test.vars,
+				Functions: map[string]function.Function{
+					"try": TryFunc,
+				},
+			}
+
+			got, err := expr.Value(ctx)
+
+			if err != nil {
+				if test.wantErr != "" {
+					if got, want := err.Error(), test.wantErr; got != want {
+						t.Errorf("wrong error\ngot:  %s\nwant: %s", got, want)
+					}
+				} else {
+					t.Errorf("unexpected error\ngot:  %s\nwant: <nil>", err)
+				}
+				return
+			}
+			if test.wantErr != "" {
+				t.Errorf("wrong error\ngot:  <nil>\nwant: %s", test.wantErr)
+			}
+
+			if !test.want.RawEquals(got) {
+				t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, test.want)
+			}
+		})
+	}
+}
+
+func TestCanFunc(t *testing.T) {
+	tests := map[string]struct {
+		expr string
+		vars map[string]cty.Value
+		want cty.Value
+	}{
+		"succeeds": {
+			`can(1)`,
+			nil,
+			cty.True,
+		},
+		"fails": {
+			`can(nope)`,
+			nil,
+			cty.False,
+		},
+		"simple unknown": {
+			`can(unknown)`,
+			map[string]cty.Value{
+				"unknown": cty.UnknownVal(cty.Number),
+			},
+			cty.UnknownVal(cty.Bool),
+		},
+		"traversal through unknown": {
+			`can(unknown.foo)`,
+			map[string]cty.Value{
+				"unknown": cty.UnknownVal(cty.Map(cty.Number)),
+			},
+			cty.UnknownVal(cty.Bool),
+		},
+		"deep unknown": {
+			`can(has_unknown)`,
+			map[string]cty.Value{
+				"has_unknown": cty.ListVal([]cty.Value{cty.UnknownVal(cty.Bool)}),
+			},
+			cty.UnknownVal(cty.Bool),
+		},
+	}
+
+	for k, test := range tests {
+		t.Run(k, func(t *testing.T) {
+			expr, diags := hclsyntax.ParseExpression([]byte(test.expr), "test.hcl", hcl.Pos{Line: 1, Column: 1})
+			if diags.HasErrors() {
+				t.Fatalf("unexpected problems: %s", diags.Error())
+			}
+
+			ctx := &hcl.EvalContext{
+				Variables: test.vars,
+				Functions: map[string]function.Function{
+					"can": CanFunc,
+				},
+			}
+
+			got, err := expr.Value(ctx)
+			if err != nil {
+				t.Errorf("unexpected error\ngot:  %s\nwant: <nil>", err)
+			}
+			if !test.want.RawEquals(got) {
+				t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, test.want)
+			}
+		})
+	}
+}
diff --git a/ext/typeexpr/README.md b/ext/typeexpr/README.md
new file mode 100644
index 0000000..058f1e3
--- /dev/null
+++ b/ext/typeexpr/README.md
@@ -0,0 +1,135 @@
+# HCL Type Expressions Extension
+
+This HCL extension defines a convention for describing HCL types using function
+call and variable reference syntax, allowing configuration formats to include
+type information provided by users.
+
+The type syntax is processed statically from a hcl.Expression, so it cannot
+use any of the usual language operators. This is similar to type expressions
+in statically-typed programming languages.
+
+```hcl
+variable "example" {
+  type = list(string)
+}
+```
+
+The extension is built using the `hcl.ExprAsKeyword` and `hcl.ExprCall`
+functions, and so it relies on the underlying syntax to define how "keyword"
+and "call" are interpreted. The above shows how they are interpreted in
+the HCL native syntax, while the following shows the same information
+expressed in JSON:
+
+```json
+{
+  "variable": {
+    "example": {
+      "type": "list(string)"
+    }
+  }
+}
+```
+
+Notice that because we have the additional contextual information that only
+calls and keywords are allowed here, the JSON syntax is able to parse the
+given string directly as an expression, rather than as a template as would
+be the case for normal expression evaluation.
+
+For more information, see [the godoc reference](http://godoc.org/github.com/hashicorp/hcl/v2/ext/typeexpr).
+
+## Type Expression Syntax
+
+When expressed in the native syntax, the following expressions are permitted
+in a type expression:
+
+* `string` - string
+* `bool` - boolean
+* `number` - number
+* `any` - `cty.DynamicPseudoType` (in function `TypeConstraint` only)
+* `list(<type_expr>)` - list of the type given as an argument
+* `set(<type_expr>)` - set of the type given as an argument
+* `map(<type_expr>)` - map of the type given as an argument
+* `tuple([<type_exprs...>])` - tuple with the element types given in the single list argument
+* `object({<attr_name>=<type_expr>, ...})` - object with the attributes and corresponding types given in the single map argument
+
+For example:
+
+* `list(string)`
+* `object({name=string,age=number})`
+* `map(object({name=string,age=number}))`
+
+Note that the object constructor syntax is not fully-general for all possible
+object types because it requires the attribute names to be valid identifiers.
+In practice it is expected that any time an object type is being fixed for
+type checking it will be one that has identifiers as its attributes; object
+types with weird attributes generally show up only from arbitrary object
+constructors in configuration files, which are usually treated either as maps
+or as the dynamic pseudo-type.
+
+## Type Constraints as Values
+
+Along with defining a convention for writing down types using HCL expression
+constructs, this package also includes a mechanism for representing types as
+values that can be used as data within an HCL-based language.
+
+`typeexpr.TypeConstraintType` is a
+[`cty` capsule type](https://github.com/zclconf/go-cty/blob/master/docs/types.md#capsule-types)
+that encapsulates `cty.Type` values. You can construct such a value directly
+using the `TypeConstraintVal` function:
+
+```go
+tyVal := typeexpr.TypeConstraintVal(cty.String)
+
+// We can unpack the type from a value using TypeConstraintFromVal
+ty := typeexpr.TypeConstraintFromVal(tyVal)
+```
+
+However, the primary purpose of `typeexpr.TypeConstraintType` is to be
+specified as the type constraint for an argument, in which case it serves
+as a signal for HCL to treat the argument expression as a type constraint
+expression as defined above, rather than as a normal value expression.
+
+"An argument" in the above in practice means the following two locations:
+
+* As the type constraint for a parameter of a cty function that will be
+  used in an `hcl.EvalContext`. In that case, function calls in the HCL
+  native expression syntax will require the argument to be valid type constraint
+  expression syntax and the function implementation will receive a
+  `TypeConstraintType` value as the argument value for that parameter.
+
+* As the type constraint for a `hcldec.AttrSpec` or `hcldec.BlockAttrsSpec`
+  when decoding an HCL body using `hcldec`. In that case, the attributes
+  with that type constraint will be required to be valid type constraint
+  expression syntax and the result will be a `TypeConstraintType` value.
+
+Note that the special handling of these arguments means that an argument
+marked in this way must use the type constraint syntax directly. It is not
+valid to pass in a value of `TypeConstraintType` that has been obtained
+dynamically via some other expression result.
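+
+As an illustration of the second case, a minimal sketch of an `hcldec` spec
+for an attribute that must be written in type constraint syntax:
+
+```go
+spec := &hcldec.AttrSpec{
+    Name:     "type",
+    Type:     typeexpr.TypeConstraintType,
+    Required: true,
+}
+// Decoding produces a TypeConstraintType value; the underlying cty.Type can
+// then be recovered with typeexpr.TypeConstraintFromVal.
+```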
+
+`TypeConstraintType` is provided with the intent of using it internally within
+application code when incorporating type constraint expression syntax into
+an HCL-based language, not to be used for dynamic "programming with types". A
+calling application could support programming with types by defining its _own_
+capsule type, but that is not the purpose of `TypeConstraintType`.
+
+## The "convert" `cty` Function
+
+Building on the `TypeConstraintType` described in the previous section, this
+package also provides `typeexpr.ConvertFunc` which is a cty function that
+can be placed into an `hcl.EvalContext` (conventionally named "convert") in
+order to provide a general type conversion function in an HCL-based language:
+
+```hcl
+  foo = convert("true", bool)
+```
+
+The second parameter uses the mechanism described in the previous section to
+require its argument to be a type constraint expression rather than a value
+expression. In doing so, it allows converting with any type constraint that
+can be expressed in this package's type constraint syntax. In the above example,
+the `foo` argument would receive a boolean true, or `cty.True` in `cty` terms.
+
+The target type constraint must always be provided statically using inline
+type constraint syntax. There is no way to _dynamically_ select a type
+constraint using this function.
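+
+`ConvertFunc` can be exposed to user expressions by adding it to the function
+table of an `hcl.EvalContext`. A minimal sketch:
+
+```go
+ctx := &hcl.EvalContext{
+    Functions: map[string]function.Function{
+        "convert": typeexpr.ConvertFunc,
+    },
+}
+```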
diff --git a/ext/typeexpr/doc.go b/ext/typeexpr/doc.go
new file mode 100644
index 0000000..c4b3795
--- /dev/null
+++ b/ext/typeexpr/doc.go
@@ -0,0 +1,11 @@
+// Package typeexpr extends HCL with a convention for describing HCL types
+// within configuration files.
+//
+// The type syntax is processed statically from a hcl.Expression, so it cannot
+// use any of the usual language operators. This is similar to type expressions
+// in statically-typed programming languages.
+//
+//     variable "example" {
+//       type = list(string)
+//     }
+package typeexpr
diff --git a/ext/typeexpr/get_type.go b/ext/typeexpr/get_type.go
new file mode 100644
index 0000000..11b0689
--- /dev/null
+++ b/ext/typeexpr/get_type.go
@@ -0,0 +1,196 @@
+package typeexpr
+
+import (
+	"fmt"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+const invalidTypeSummary = "Invalid type specification"
+
+// getType is the internal implementation of both Type and TypeConstraint,
+// using the passed flag to distinguish. When constraint is false, the "any"
+// keyword will produce an error.
+func getType(expr hcl.Expression, constraint bool) (cty.Type, hcl.Diagnostics) {
+	// First we'll try for one of our keywords
+	kw := hcl.ExprAsKeyword(expr)
+	switch kw {
+	case "bool":
+		return cty.Bool, nil
+	case "string":
+		return cty.String, nil
+	case "number":
+		return cty.Number, nil
+	case "any":
+		if constraint {
+			return cty.DynamicPseudoType, nil
+		}
+		return cty.DynamicPseudoType, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  invalidTypeSummary,
+			Detail:   fmt.Sprintf("The keyword %q cannot be used in this type specification: an exact type is required.", kw),
+			Subject:  expr.Range().Ptr(),
+		}}
+	case "list", "map", "set":
+		return cty.DynamicPseudoType, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  invalidTypeSummary,
+			Detail:   fmt.Sprintf("The %s type constructor requires one argument specifying the element type.", kw),
+			Subject:  expr.Range().Ptr(),
+		}}
+	case "object":
+		return cty.DynamicPseudoType, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  invalidTypeSummary,
+			Detail:   "The object type constructor requires one argument specifying the attribute types and values as a map.",
+			Subject:  expr.Range().Ptr(),
+		}}
+	case "tuple":
+		return cty.DynamicPseudoType, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  invalidTypeSummary,
+			Detail:   "The tuple type constructor requires one argument specifying the element types as a list.",
+			Subject:  expr.Range().Ptr(),
+		}}
+	case "":
+		// okay! we'll fall through and try processing as a call, then.
+	default:
+		return cty.DynamicPseudoType, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  invalidTypeSummary,
+			Detail:   fmt.Sprintf("The keyword %q is not a valid type specification.", kw),
+			Subject:  expr.Range().Ptr(),
+		}}
+	}
+
+	// If we get down here then our expression isn't just a keyword, so we'll
+	// try to process it as a call instead.
+	call, diags := hcl.ExprCall(expr)
+	if diags.HasErrors() {
+		return cty.DynamicPseudoType, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  invalidTypeSummary,
+			Detail:   "A type specification is either a primitive type keyword (bool, number, string) or a complex type constructor call, like list(string).",
+			Subject:  expr.Range().Ptr(),
+		}}
+	}
+
+	switch call.Name {
+	case "bool", "string", "number", "any":
+		return cty.DynamicPseudoType, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  invalidTypeSummary,
+			Detail:   fmt.Sprintf("Primitive type keyword %q does not expect arguments.", call.Name),
+			Subject:  &call.ArgsRange,
+		}}
+	}
+
+	if len(call.Arguments) != 1 {
+		contextRange := call.ArgsRange
+		subjectRange := call.ArgsRange
+		if len(call.Arguments) > 1 {
+			// If we have too many arguments (as opposed to too _few_) then
+			// we'll highlight the extraneous arguments as the diagnostic
+			// subject.
+			subjectRange = hcl.RangeBetween(call.Arguments[1].Range(), call.Arguments[len(call.Arguments)-1].Range())
+		}
+
+		switch call.Name {
+		case "list", "set", "map":
+			return cty.DynamicPseudoType, hcl.Diagnostics{{
+				Severity: hcl.DiagError,
+				Summary:  invalidTypeSummary,
+				Detail:   fmt.Sprintf("The %s type constructor requires one argument specifying the element type.", call.Name),
+				Subject:  &subjectRange,
+				Context:  &contextRange,
+			}}
+		case "object":
+			return cty.DynamicPseudoType, hcl.Diagnostics{{
+				Severity: hcl.DiagError,
+				Summary:  invalidTypeSummary,
+				Detail:   "The object type constructor requires one argument specifying the attribute types and values as a map.",
+				Subject:  &subjectRange,
+				Context:  &contextRange,
+			}}
+		case "tuple":
+			return cty.DynamicPseudoType, hcl.Diagnostics{{
+				Severity: hcl.DiagError,
+				Summary:  invalidTypeSummary,
+				Detail:   "The tuple type constructor requires one argument specifying the element types as a list.",
+				Subject:  &subjectRange,
+				Context:  &contextRange,
+			}}
+		}
+	}
+
+	switch call.Name {
+
+	case "list":
+		ety, diags := getType(call.Arguments[0], constraint)
+		return cty.List(ety), diags
+	case "set":
+		ety, diags := getType(call.Arguments[0], constraint)
+		return cty.Set(ety), diags
+	case "map":
+		ety, diags := getType(call.Arguments[0], constraint)
+		return cty.Map(ety), diags
+	case "object":
+		attrDefs, diags := hcl.ExprMap(call.Arguments[0])
+		if diags.HasErrors() {
+			return cty.DynamicPseudoType, hcl.Diagnostics{{
+				Severity: hcl.DiagError,
+				Summary:  invalidTypeSummary,
+				Detail:   "Object type constructor requires a map whose keys are attribute names and whose values are the corresponding attribute types.",
+				Subject:  call.Arguments[0].Range().Ptr(),
+				Context:  expr.Range().Ptr(),
+			}}
+		}
+
+		atys := make(map[string]cty.Type)
+		for _, attrDef := range attrDefs {
+			attrName := hcl.ExprAsKeyword(attrDef.Key)
+			if attrName == "" {
+				diags = append(diags, &hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  invalidTypeSummary,
+					Detail:   "Object constructor map keys must be attribute names.",
+					Subject:  attrDef.Key.Range().Ptr(),
+					Context:  expr.Range().Ptr(),
+				})
+				continue
+			}
+			aty, attrDiags := getType(attrDef.Value, constraint)
+			diags = append(diags, attrDiags...)
+			atys[attrName] = aty
+		}
+		return cty.Object(atys), diags
+	case "tuple":
+		elemDefs, diags := hcl.ExprList(call.Arguments[0])
+		if diags.HasErrors() {
+			return cty.DynamicPseudoType, hcl.Diagnostics{{
+				Severity: hcl.DiagError,
+				Summary:  invalidTypeSummary,
+				Detail:   "Tuple type constructor requires a list of element types.",
+				Subject:  call.Arguments[0].Range().Ptr(),
+				Context:  expr.Range().Ptr(),
+			}}
+		}
+		etys := make([]cty.Type, len(elemDefs))
+		for i, defExpr := range elemDefs {
+			ety, elemDiags := getType(defExpr, constraint)
+			diags = append(diags, elemDiags...)
+			etys[i] = ety
+		}
+		return cty.Tuple(etys), diags
+	default:
+		// Can't access call.Arguments in this path because we've not validated
+		// that it contains exactly one expression here.
+		return cty.DynamicPseudoType, hcl.Diagnostics{{
+			Severity: hcl.DiagError,
+			Summary:  invalidTypeSummary,
+			Detail:   fmt.Sprintf("Keyword %q is not a valid type constructor.", call.Name),
+			Subject:  expr.Range().Ptr(),
+		}}
+	}
+}
diff --git a/ext/typeexpr/get_type_test.go b/ext/typeexpr/get_type_test.go
new file mode 100644
index 0000000..391bf4f
--- /dev/null
+++ b/ext/typeexpr/get_type_test.go
@@ -0,0 +1,352 @@
+package typeexpr
+
+import (
+	"testing"
+
+	"github.com/hashicorp/hcl/v2/gohcl"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hclsyntax"
+	"github.com/hashicorp/hcl/v2/json"
+	"github.com/zclconf/go-cty/cty"
+)
+
+func TestGetType(t *testing.T) {
+	tests := []struct {
+		Source     string
+		Constraint bool
+		Want       cty.Type
+		WantError  string
+	}{
+		// keywords
+		{
+			`bool`,
+			false,
+			cty.Bool,
+			"",
+		},
+		{
+			`number`,
+			false,
+			cty.Number,
+			"",
+		},
+		{
+			`string`,
+			false,
+			cty.String,
+			"",
+		},
+		{
+			`any`,
+			false,
+			cty.DynamicPseudoType,
+			`The keyword "any" cannot be used in this type specification: an exact type is required.`,
+		},
+		{
+			`any`,
+			true,
+			cty.DynamicPseudoType,
+			"",
+		},
+		{
+			`list`,
+			false,
+			cty.DynamicPseudoType,
+			"The list type constructor requires one argument specifying the element type.",
+		},
+		{
+			`map`,
+			false,
+			cty.DynamicPseudoType,
+			"The map type constructor requires one argument specifying the element type.",
+		},
+		{
+			`set`,
+			false,
+			cty.DynamicPseudoType,
+			"The set type constructor requires one argument specifying the element type.",
+		},
+		{
+			`object`,
+			false,
+			cty.DynamicPseudoType,
+			"The object type constructor requires one argument specifying the attribute types and values as a map.",
+		},
+		{
+			`tuple`,
+			false,
+			cty.DynamicPseudoType,
+			"The tuple type constructor requires one argument specifying the element types as a list.",
+		},
+
+		// constructors
+		{
+			`bool()`,
+			false,
+			cty.DynamicPseudoType,
+			`Primitive type keyword "bool" does not expect arguments.`,
+		},
+		{
+			`number()`,
+			false,
+			cty.DynamicPseudoType,
+			`Primitive type keyword "number" does not expect arguments.`,
+		},
+		{
+			`string()`,
+			false,
+			cty.DynamicPseudoType,
+			`Primitive type keyword "string" does not expect arguments.`,
+		},
+		{
+			`any()`,
+			false,
+			cty.DynamicPseudoType,
+			`Primitive type keyword "any" does not expect arguments.`,
+		},
+		{
+			`any()`,
+			true,
+			cty.DynamicPseudoType,
+			`Primitive type keyword "any" does not expect arguments.`,
+		},
+		{
+			`list(string)`,
+			false,
+			cty.List(cty.String),
+			``,
+		},
+		{
+			`set(string)`,
+			false,
+			cty.Set(cty.String),
+			``,
+		},
+		{
+			`map(string)`,
+			false,
+			cty.Map(cty.String),
+			``,
+		},
+		{
+			`list()`,
+			false,
+			cty.DynamicPseudoType,
+			`The list type constructor requires one argument specifying the element type.`,
+		},
+		{
+			`list(string, string)`,
+			false,
+			cty.DynamicPseudoType,
+			`The list type constructor requires one argument specifying the element type.`,
+		},
+		{
+			`list(any)`,
+			false,
+			cty.List(cty.DynamicPseudoType),
+			`The keyword "any" cannot be used in this type specification: an exact type is required.`,
+		},
+		{
+			`list(any)`,
+			true,
+			cty.List(cty.DynamicPseudoType),
+			``,
+		},
+		{
+			`object({})`,
+			false,
+			cty.EmptyObject,
+			``,
+		},
+		{
+			`object({name=string})`,
+			false,
+			cty.Object(map[string]cty.Type{"name": cty.String}),
+			``,
+		},
+		{
+			`object({"name"=string})`,
+			false,
+			cty.EmptyObject,
+			`Object constructor map keys must be attribute names.`,
+		},
+		{
+			`object({name=nope})`,
+			false,
+			cty.Object(map[string]cty.Type{"name": cty.DynamicPseudoType}),
+			`The keyword "nope" is not a valid type specification.`,
+		},
+		{
+			`object()`,
+			false,
+			cty.DynamicPseudoType,
+			`The object type constructor requires one argument specifying the attribute types and values as a map.`,
+		},
+		{
+			`object(string)`,
+			false,
+			cty.DynamicPseudoType,
+			`Object type constructor requires a map whose keys are attribute names and whose values are the corresponding attribute types.`,
+		},
+		{
+			`tuple([])`,
+			false,
+			cty.EmptyTuple,
+			``,
+		},
+		{
+			`tuple([string, bool])`,
+			false,
+			cty.Tuple([]cty.Type{cty.String, cty.Bool}),
+			``,
+		},
+		{
+			`tuple([nope])`,
+			false,
+			cty.Tuple([]cty.Type{cty.DynamicPseudoType}),
+			`The keyword "nope" is not a valid type specification.`,
+		},
+		{
+			`tuple()`,
+			false,
+			cty.DynamicPseudoType,
+			`The tuple type constructor requires one argument specifying the element types as a list.`,
+		},
+		{
+			`tuple(string)`,
+			false,
+			cty.DynamicPseudoType,
+			`Tuple type constructor requires a list of element types.`,
+		},
+		{
+			`shwoop(string)`,
+			false,
+			cty.DynamicPseudoType,
+			`Keyword "shwoop" is not a valid type constructor.`,
+		},
+		{
+			`list("string")`,
+			false,
+			cty.List(cty.DynamicPseudoType),
+			`A type specification is either a primitive type keyword (bool, number, string) or a complex type constructor call, like list(string).`,
+		},
+
+		// More interesting combinations
+		{
+			`list(object({}))`,
+			false,
+			cty.List(cty.EmptyObject),
+			``,
+		},
+		{
+			`list(map(tuple([])))`,
+			false,
+			cty.List(cty.Map(cty.EmptyTuple)),
+			``,
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.Source, func(t *testing.T) {
+			expr, diags := hclsyntax.ParseExpression([]byte(test.Source), "", hcl.Pos{Line: 1, Column: 1})
+			if diags.HasErrors() {
+				t.Fatalf("failed to parse: %s", diags)
+			}
+
+			got, diags := getType(expr, test.Constraint)
+			if test.WantError == "" {
+				for _, diag := range diags {
+					t.Error(diag)
+				}
+			} else {
+				found := false
+				for _, diag := range diags {
+					t.Log(diag)
+					if diag.Severity == hcl.DiagError && diag.Detail == test.WantError {
+						found = true
+					}
+				}
+				if !found {
+					t.Errorf("missing expected error detail message: %s", test.WantError)
+				}
+			}
+
+			if !got.Equals(test.Want) {
+				t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, test.Want)
+			}
+		})
+	}
+}
+
+func TestGetTypeJSON(t *testing.T) {
+	// We have fewer test cases here because we're mainly exercising the
+	// extra indirection in the JSON syntax package, which ultimately calls
+	// into the native syntax parser (which we tested extensively in
+	// TestGetType).
+	tests := []struct {
+		Source     string
+		Constraint bool
+		Want       cty.Type
+		WantError  string
+	}{
+		{
+			`{"expr":"bool"}`,
+			false,
+			cty.Bool,
+			"",
+		},
+		{
+			`{"expr":"list(bool)"}`,
+			false,
+			cty.List(cty.Bool),
+			"",
+		},
+		{
+			`{"expr":"list"}`,
+			false,
+			cty.DynamicPseudoType,
+			"The list type constructor requires one argument specifying the element type.",
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.Source, func(t *testing.T) {
+			file, diags := json.Parse([]byte(test.Source), "")
+			if diags.HasErrors() {
+				t.Fatalf("failed to parse: %s", diags)
+			}
+
+			type TestContent struct {
+				Expr hcl.Expression `hcl:"expr"`
+			}
+			var content TestContent
+			diags = gohcl.DecodeBody(file.Body, nil, &content)
+			if diags.HasErrors() {
+				t.Fatalf("failed to decode: %s", diags)
+			}
+
+			got, diags := getType(content.Expr, test.Constraint)
+			if test.WantError == "" {
+				for _, diag := range diags {
+					t.Error(diag)
+				}
+			} else {
+				found := false
+				for _, diag := range diags {
+					t.Log(diag)
+					if diag.Severity == hcl.DiagError && diag.Detail == test.WantError {
+						found = true
+					}
+				}
+				if !found {
+					t.Errorf("missing expected error detail message: %s", test.WantError)
+				}
+			}
+
+			if !got.Equals(test.Want) {
+				t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, test.Want)
+			}
+		})
+	}
+}
diff --git a/ext/typeexpr/public.go b/ext/typeexpr/public.go
new file mode 100644
index 0000000..3b8f618
--- /dev/null
+++ b/ext/typeexpr/public.go
@@ -0,0 +1,129 @@
+package typeexpr
+
+import (
+	"bytes"
+	"fmt"
+	"sort"
+
+	"github.com/hashicorp/hcl/v2/hclsyntax"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+// Type attempts to process the given expression as a type expression and, if
+// successful, returns the resulting type. If unsuccessful, error diagnostics
+// are returned.
+func Type(expr hcl.Expression) (cty.Type, hcl.Diagnostics) {
+	return getType(expr, false)
+}
+
+// TypeConstraint attempts to parse the given expression as a type constraint
+// and, if successful, returns the resulting type. If unsuccessful, error
+// diagnostics are returned.
+//
+// A type constraint has the same structure as a type, but it additionally
+// allows the keyword "any" to represent cty.DynamicPseudoType, which is often
+// used as a wildcard in type checking and type conversion operations.
+func TypeConstraint(expr hcl.Expression) (cty.Type, hcl.Diagnostics) {
+	return getType(expr, true)
+}
+
+// TypeString returns a string rendering of the given type as it would be
+// expected to appear in the HCL native syntax.
+//
+// This is primarily intended for showing types to the user in an application
+// that uses typeexpr, where the user can be assumed to be familiar with the
+// type expression syntax. In applications that do not use typeexpr these
+// results may be confusing to the user and so type.FriendlyName may be
+// preferable, even though it's less precise.
+//
+// TypeString produces reasonable results only for types like what would be
+// produced by the Type and TypeConstraint functions. In particular, it cannot
+// support capsule types.
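+//
+// For example, TypeString(cty.List(cty.String)) returns "list(string)" and
+// TypeString(cty.Object(map[string]cty.Type{"name": cty.String})) returns
+// "object({name=string})".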
+func TypeString(ty cty.Type) string {
+	// Easy cases first
+	switch ty {
+	case cty.String:
+		return "string"
+	case cty.Bool:
+		return "bool"
+	case cty.Number:
+		return "number"
+	case cty.DynamicPseudoType:
+		return "any"
+	}
+
+	if ty.IsCapsuleType() {
+		panic("TypeString does not support capsule types")
+	}
+
+	if ty.IsCollectionType() {
+		ety := ty.ElementType()
+		etyString := TypeString(ety)
+		switch {
+		case ty.IsListType():
+			return fmt.Sprintf("list(%s)", etyString)
+		case ty.IsSetType():
+			return fmt.Sprintf("set(%s)", etyString)
+		case ty.IsMapType():
+			return fmt.Sprintf("map(%s)", etyString)
+		default:
+			// Should never happen because the above is exhaustive
+			panic("unsupported collection type")
+		}
+	}
+
+	if ty.IsObjectType() {
+		var buf bytes.Buffer
+		buf.WriteString("object({")
+		atys := ty.AttributeTypes()
+		names := make([]string, 0, len(atys))
+		for name := range atys {
+			names = append(names, name)
+		}
+		sort.Strings(names)
+		first := true
+		for _, name := range names {
+			aty := atys[name]
+			if !first {
+				buf.WriteByte(',')
+			}
+			if !hclsyntax.ValidIdentifier(name) {
+				// Should never happen for any type produced by this package,
+				// but we'll do something reasonable here just so we don't
+				// produce garbage if someone gives us a hand-assembled object
+				// type that has weird attribute names.
+				// Using Go-style quoting here isn't perfect, since it doesn't
+				// exactly match HCL syntax, but it's fine for an edge-case.
+				buf.WriteString(fmt.Sprintf("%q", name))
+			} else {
+				buf.WriteString(name)
+			}
+			buf.WriteByte('=')
+			buf.WriteString(TypeString(aty))
+			first = false
+		}
+		buf.WriteString("})")
+		return buf.String()
+	}
+
+	if ty.IsTupleType() {
+		var buf bytes.Buffer
+		buf.WriteString("tuple([")
+		etys := ty.TupleElementTypes()
+		first := true
+		for _, ety := range etys {
+			if !first {
+				buf.WriteByte(',')
+			}
+			buf.WriteString(TypeString(ety))
+			first = false
+		}
+		buf.WriteString("])")
+		return buf.String()
+	}
+
+	// Should never happen because we covered all cases above.
+	panic(fmt.Errorf("unsupported type %#v", ty))
+}
diff --git a/ext/typeexpr/type_string_test.go b/ext/typeexpr/type_string_test.go
new file mode 100644
index 0000000..fbdf3f4
--- /dev/null
+++ b/ext/typeexpr/type_string_test.go
@@ -0,0 +1,100 @@
+package typeexpr
+
+import (
+	"testing"
+
+	"github.com/zclconf/go-cty/cty"
+)
+
+func TestTypeString(t *testing.T) {
+	tests := []struct {
+		Type cty.Type
+		Want string
+	}{
+		{
+			cty.DynamicPseudoType,
+			"any",
+		},
+		{
+			cty.String,
+			"string",
+		},
+		{
+			cty.Number,
+			"number",
+		},
+		{
+			cty.Bool,
+			"bool",
+		},
+		{
+			cty.List(cty.Number),
+			"list(number)",
+		},
+		{
+			cty.Set(cty.Bool),
+			"set(bool)",
+		},
+		{
+			cty.Map(cty.String),
+			"map(string)",
+		},
+		{
+			cty.EmptyObject,
+			"object({})",
+		},
+		{
+			cty.Object(map[string]cty.Type{"foo": cty.Bool}),
+			"object({foo=bool})",
+		},
+		{
+			cty.Object(map[string]cty.Type{"foo": cty.Bool, "bar": cty.String}),
+			"object({bar=string,foo=bool})",
+		},
+		{
+			cty.EmptyTuple,
+			"tuple([])",
+		},
+		{
+			cty.Tuple([]cty.Type{cty.Bool}),
+			"tuple([bool])",
+		},
+		{
+			cty.Tuple([]cty.Type{cty.Bool, cty.String}),
+			"tuple([bool,string])",
+		},
+		{
+			cty.List(cty.DynamicPseudoType),
+			"list(any)",
+		},
+		{
+			cty.Tuple([]cty.Type{cty.DynamicPseudoType}),
+			"tuple([any])",
+		},
+		{
+			cty.Object(map[string]cty.Type{"foo": cty.DynamicPseudoType}),
+			"object({foo=any})",
+		},
+		{
+			// We don't expect to find attributes that aren't valid identifiers
+			// because we only promise to support types that this package
+			// would've created, but we allow this situation during rendering
+			// just because it's convenient for applications trying to produce
+			// error messages about mismatched types. Note that the quoted
+			// attribute name is not actually accepted by our Type and
+			// TypeConstraint functions, so this is one situation where the
+			// TypeString result cannot be re-parsed by those functions.
+			cty.Object(map[string]cty.Type{"foo bar baz": cty.String}),
+			`object({"foo bar baz"=string})`,
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.Type.GoString(), func(t *testing.T) {
+			got := TypeString(test.Type)
+			if got != test.Want {
+				t.Errorf("wrong result\ntype: %#v\ngot:  %s\nwant: %s", test.Type, got, test.Want)
+			}
+		})
+	}
+}
diff --git a/ext/typeexpr/type_type.go b/ext/typeexpr/type_type.go
new file mode 100644
index 0000000..5462d82
--- /dev/null
+++ b/ext/typeexpr/type_type.go
@@ -0,0 +1,118 @@
+package typeexpr
+
+import (
+	"fmt"
+	"reflect"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/ext/customdecode"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/convert"
+	"github.com/zclconf/go-cty/cty/function"
+)
+
+// TypeConstraintType is a cty capsule type that allows cty type constraints to
+// be used as values.
+//
+// If TypeConstraintType is used in a context supporting the
+// customdecode.CustomExpressionDecoder extension then it will implement
+// expression decoding using the TypeConstraint function, thus allowing
+// type expressions to be used in contexts where value expressions might
+// normally be expected, such as in arguments to function calls.
+var TypeConstraintType cty.Type
+
+// TypeConstraintVal constructs a cty.Value whose type is
+// TypeConstraintType.
+func TypeConstraintVal(ty cty.Type) cty.Value {
+	return cty.CapsuleVal(TypeConstraintType, &ty)
+}
+
+// TypeConstraintFromVal extracts the type from a cty.Value of
+// TypeConstraintType that was previously constructed using TypeConstraintVal.
+//
+// If the given value isn't a known, non-null value of TypeConstraintType
+// then this function will panic.
+func TypeConstraintFromVal(v cty.Value) cty.Type {
+	if !v.Type().Equals(TypeConstraintType) {
+		panic("value is not of TypeConstraintType")
+	}
+	ptr := v.EncapsulatedValue().(*cty.Type)
+	return *ptr
+}
+
+// ConvertFunc is a cty function that implements type conversions.
+//
+// Its signature is as follows:
+//     convert(value, type_constraint)
+//
+// ...where type_constraint is a type constraint expression as defined by
+// typeexpr.TypeConstraint.
+//
+// It relies on HCL's customdecode extension and so it's not suitable for use
+// in non-HCL contexts or if you are using an HCL syntax implementation that
+// does not support customdecode for function arguments. However, it _is_
+// supported for function calls in the HCL native expression syntax.
+var ConvertFunc function.Function
+
+func init() {
+	TypeConstraintType = cty.CapsuleWithOps("type constraint", reflect.TypeOf(cty.Type{}), &cty.CapsuleOps{
+		ExtensionData: func(key interface{}) interface{} {
+			switch key {
+			case customdecode.CustomExpressionDecoder:
+				return customdecode.CustomExpressionDecoderFunc(
+					func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+						ty, diags := TypeConstraint(expr)
+						if diags.HasErrors() {
+							return cty.NilVal, diags
+						}
+						return TypeConstraintVal(ty), nil
+					},
+				)
+			default:
+				return nil
+			}
+		},
+		TypeGoString: func(_ reflect.Type) string {
+			return "typeexpr.TypeConstraintType"
+		},
+		GoString: func(raw interface{}) string {
+			tyPtr := raw.(*cty.Type)
+			return fmt.Sprintf("typeexpr.TypeConstraintVal(%#v)", *tyPtr)
+		},
+		RawEquals: func(a, b interface{}) bool {
+			aPtr := a.(*cty.Type)
+			bPtr := b.(*cty.Type)
+			return (*aPtr).Equals(*bPtr)
+		},
+	})
+
+	ConvertFunc = function.New(&function.Spec{
+		Params: []function.Parameter{
+			{
+				Name:             "value",
+				Type:             cty.DynamicPseudoType,
+				AllowNull:        true,
+				AllowDynamicType: true,
+			},
+			{
+				Name: "type",
+				Type: TypeConstraintType,
+			},
+		},
+		Type: func(args []cty.Value) (cty.Type, error) {
+			wantTypePtr := args[1].EncapsulatedValue().(*cty.Type)
+			got, err := convert.Convert(args[0], *wantTypePtr)
+			if err != nil {
+				return cty.NilType, function.NewArgError(0, err)
+			}
+			return got.Type(), nil
+		},
+		Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+			v, err := convert.Convert(args[0], retType)
+			if err != nil {
+				return cty.NilVal, function.NewArgError(0, err)
+			}
+			return v, nil
+		},
+	})
+}
diff --git a/ext/typeexpr/type_type_test.go b/ext/typeexpr/type_type_test.go
new file mode 100644
index 0000000..2286a2e
--- /dev/null
+++ b/ext/typeexpr/type_type_test.go
@@ -0,0 +1,118 @@
+package typeexpr
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/zclconf/go-cty/cty"
+)
+
+func TestTypeConstraintType(t *testing.T) {
+	tyVal1 := TypeConstraintVal(cty.String)
+	tyVal2 := TypeConstraintVal(cty.String)
+	tyVal3 := TypeConstraintVal(cty.Number)
+
+	if !tyVal1.RawEquals(tyVal2) {
+		t.Errorf("tyVal1 not equal to tyVal2\ntyVal1: %#v\ntyVal2: %#v", tyVal1, tyVal2)
+	}
+	if tyVal1.RawEquals(tyVal3) {
+		t.Errorf("tyVal1 equal to tyVal2, but should not be\ntyVal1: %#v\ntyVal3: %#v", tyVal1, tyVal3)
+	}
+
+	if got, want := TypeConstraintFromVal(tyVal1), cty.String; !got.Equals(want) {
+		t.Errorf("wrong type extracted from tyVal1\ngot:  %#v\nwant: %#v", got, want)
+	}
+	if got, want := TypeConstraintFromVal(tyVal3), cty.Number; !got.Equals(want) {
+		t.Errorf("wrong type extracted from tyVal3\ngot:  %#v\nwant: %#v", got, want)
+	}
+}
+
+func TestConvertFunc(t *testing.T) {
+	// This is testing the convert function directly, skipping over the HCL
+	// parsing and evaluation steps that would normally lead there. There is
+	// another test in the "integrationtest" package called TestTypeConvertFunc
+	// that exercises the full path to this function via the hclsyntax parser.
+
+	tests := []struct {
+		val, ty cty.Value
+		want    cty.Value
+		wantErr string
+	}{
+		// The goal here is not an exhaustive set of conversions, since that's
+		// already covered in cty/convert, but rather exercising different
+		// permutations of success and failure to make sure the function
+		// handles all of the results in a reasonable way.
+		{
+			cty.StringVal("hello"),
+			TypeConstraintVal(cty.String),
+			cty.StringVal("hello"),
+			``,
+		},
+		{
+			cty.True,
+			TypeConstraintVal(cty.String),
+			cty.StringVal("true"),
+			``,
+		},
+		{
+			cty.StringVal("hello"),
+			TypeConstraintVal(cty.Bool),
+			cty.NilVal,
+			`a bool is required`,
+		},
+		{
+			cty.UnknownVal(cty.Bool),
+			TypeConstraintVal(cty.Bool),
+			cty.UnknownVal(cty.Bool),
+			``,
+		},
+		{
+			cty.DynamicVal,
+			TypeConstraintVal(cty.Bool),
+			cty.UnknownVal(cty.Bool),
+			``,
+		},
+		{
+			cty.NullVal(cty.Bool),
+			TypeConstraintVal(cty.Bool),
+			cty.NullVal(cty.Bool),
+			``,
+		},
+		{
+			cty.NullVal(cty.DynamicPseudoType),
+			TypeConstraintVal(cty.Bool),
+			cty.NullVal(cty.Bool),
+			``,
+		},
+		{
+			cty.StringVal("hello").Mark(1),
+			TypeConstraintVal(cty.String),
+			cty.StringVal("hello").Mark(1),
+			``,
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(fmt.Sprintf("%#v to %#v", test.val, test.ty), func(t *testing.T) {
+			got, err := ConvertFunc.Call([]cty.Value{test.val, test.ty})
+
+			if err != nil {
+				if test.wantErr != "" {
+					if got, want := err.Error(), test.wantErr; got != want {
+						t.Errorf("wrong error\ngot:  %s\nwant: %s", got, want)
+					}
+				} else {
+					t.Errorf("unexpected error\ngot:  %s\nwant: <nil>", err)
+				}
+				return
+			}
+			if test.wantErr != "" {
+				t.Errorf("wrong error\ngot:  <nil>\nwant: %s", test.wantErr)
+			}
+
+			if !test.want.RawEquals(got) {
+				t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, test.want)
+			}
+		})
+	}
+}
diff --git a/ext/userfunc/README.md b/ext/userfunc/README.md
new file mode 100644
index 0000000..b0c726d
--- /dev/null
+++ b/ext/userfunc/README.md
@@ -0,0 +1,28 @@
+# HCL User Functions Extension
+
+This HCL extension allows a calling application to support user-defined
+functions.
+
+Functions are defined via a specific block type, like this:
+
+```hcl
+function "add" {
+  params = [a, b]
+  result = a + b
+}
+
+function "list" {
+  params         = []
+  variadic_param = items
+  result         = items
+}
+```
+
+The extension is implemented as a pre-processor for `hcl.Body` objects. Given
+a body that may contain functions, the `DecodeUserFunctions` function searches
+for blocks that define functions and returns a functions map suitable for
+inclusion in an `hcl.EvalContext`. It also returns a new `hcl.Body` that
+contains the remainder of the content from the given body, allowing for
+further processing of remaining content.
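+
+As a rough usage sketch (the configuration, file names, and variable names
+here are illustrative, not part of this extension), decoding and calling the
+`add` function shown above might look like this:
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/ext/userfunc"
+	"github.com/hashicorp/hcl/v2/hclsyntax"
+)
+
+const config = `
+function "add" {
+  params = [a, b]
+  result = a + b
+}
+`
+
+func main() {
+	f, diags := hclsyntax.ParseConfig([]byte(config), "example.hcl", hcl.Pos{Line: 1, Column: 1})
+	if diags.HasErrors() {
+		panic(diags.Error())
+	}
+
+	// Collect the function definitions. The second return value is the
+	// remaining body content, which an application would process further.
+	funcs, remain, diags := userfunc.DecodeUserFunctions(f.Body, "function", nil)
+	if diags.HasErrors() {
+		panic(diags.Error())
+	}
+	_ = remain
+
+	expr, diags := hclsyntax.ParseExpression([]byte(`add(1, 5)`), "expr.hcl", hcl.Pos{Line: 1, Column: 1})
+	if diags.HasErrors() {
+		panic(diags.Error())
+	}
+
+	v, diags := expr.Value(&hcl.EvalContext{Functions: funcs})
+	if diags.HasErrors() {
+		panic(diags.Error())
+	}
+	fmt.Println(v.AsBigFloat()) // 6
+}
+```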
+
+For more information, see [the godoc reference](https://pkg.go.dev/github.com/hashicorp/hcl/v2/ext/userfunc?tab=doc).
diff --git a/ext/userfunc/decode.go b/ext/userfunc/decode.go
new file mode 100644
index 0000000..6c1e4ca
--- /dev/null
+++ b/ext/userfunc/decode.go
@@ -0,0 +1,156 @@
+package userfunc
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+	"github.com/zclconf/go-cty/cty/function"
+)
+
+var funcBodySchema = &hcl.BodySchema{
+	Attributes: []hcl.AttributeSchema{
+		{
+			Name:     "params",
+			Required: true,
+		},
+		{
+			Name:     "variadic_param",
+			Required: false,
+		},
+		{
+			Name:     "result",
+			Required: true,
+		},
+	},
+}
+
+func decodeUserFunctions(body hcl.Body, blockType string, contextFunc ContextFunc) (funcs map[string]function.Function, remain hcl.Body, diags hcl.Diagnostics) {
+	schema := &hcl.BodySchema{
+		Blocks: []hcl.BlockHeaderSchema{
+			{
+				Type:       blockType,
+				LabelNames: []string{"name"},
+			},
+		},
+	}
+
+	content, remain, diags := body.PartialContent(schema)
+	if diags.HasErrors() {
+		return nil, remain, diags
+	}
+
+	// first call to getBaseCtx will populate context, and then the same
+	// context will be used for all subsequent calls. It's assumed that
+	// all functions in a given body should see an identical context.
+	var baseCtx *hcl.EvalContext
+	getBaseCtx := func() *hcl.EvalContext {
+		if baseCtx == nil {
+			if contextFunc != nil {
+				baseCtx = contextFunc()
+			}
+		}
+		// baseCtx might still be nil here, and that's okay
+		return baseCtx
+	}
+
+	funcs = make(map[string]function.Function)
+Blocks:
+	for _, block := range content.Blocks {
+		name := block.Labels[0]
+		funcContent, funcDiags := block.Body.Content(funcBodySchema)
+		diags = append(diags, funcDiags...)
+		if funcDiags.HasErrors() {
+			continue
+		}
+
+		paramsExpr := funcContent.Attributes["params"].Expr
+		resultExpr := funcContent.Attributes["result"].Expr
+		var varParamExpr hcl.Expression
+		if funcContent.Attributes["variadic_param"] != nil {
+			varParamExpr = funcContent.Attributes["variadic_param"].Expr
+		}
+
+		var params []string
+		var varParam string
+
+		paramExprs, paramsDiags := hcl.ExprList(paramsExpr)
+		diags = append(diags, paramsDiags...)
+		if paramsDiags.HasErrors() {
+			continue
+		}
+		for _, paramExpr := range paramExprs {
+			param := hcl.ExprAsKeyword(paramExpr)
+			if param == "" {
+				diags = append(diags, &hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Invalid param element",
+					Detail:   "Each parameter name must be an identifier.",
+					Subject:  paramExpr.Range().Ptr(),
+				})
+				continue Blocks
+			}
+			params = append(params, param)
+		}
+
+		if varParamExpr != nil {
+			varParam = hcl.ExprAsKeyword(varParamExpr)
+			if varParam == "" {
+				diags = append(diags, &hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  "Invalid variadic_param",
+					Detail:   "The variadic parameter name must be an identifier.",
+					Subject:  varParamExpr.Range().Ptr(),
+				})
+				continue
+			}
+		}
+
+		spec := &function.Spec{}
+		for _, paramName := range params {
+			spec.Params = append(spec.Params, function.Parameter{
+				Name: paramName,
+				Type: cty.DynamicPseudoType,
+			})
+		}
+		if varParamExpr != nil {
+			spec.VarParam = &function.Parameter{
+				Name: varParam,
+				Type: cty.DynamicPseudoType,
+			}
+		}
+		impl := func(args []cty.Value) (cty.Value, error) {
+			ctx := getBaseCtx()
+			ctx = ctx.NewChild()
+			ctx.Variables = make(map[string]cty.Value)
+
+			// The cty function machinery guarantees that we have at least
+			// enough args to fill all of our params.
+			for i, paramName := range params {
+				ctx.Variables[paramName] = args[i]
+			}
+			if spec.VarParam != nil {
+				varArgs := args[len(params):]
+				ctx.Variables[varParam] = cty.TupleVal(varArgs)
+			}
+
+			result, diags := resultExpr.Value(ctx)
+			if diags.HasErrors() {
+				// Smuggle the diagnostics out via the error channel, since
+				// a diagnostics sequence implements error. Caller can
+				// type-assert this to recover the individual diagnostics
+				// if desired.
+				return cty.DynamicVal, diags
+			}
+			return result, nil
+		}
+		spec.Type = func(args []cty.Value) (cty.Type, error) {
+			val, err := impl(args)
+			return val.Type(), err
+		}
+		spec.Impl = func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+			return impl(args)
+		}
+		funcs[name] = function.New(spec)
+	}
+
+	return funcs, remain, diags
+}
diff --git a/ext/userfunc/decode_test.go b/ext/userfunc/decode_test.go
new file mode 100644
index 0000000..633c6d8
--- /dev/null
+++ b/ext/userfunc/decode_test.go
@@ -0,0 +1,174 @@
+package userfunc
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hclsyntax"
+	"github.com/zclconf/go-cty/cty"
+)
+
+func TestDecodeUserFunctions(t *testing.T) {
+	tests := []struct {
+		src       string
+		testExpr  string
+		baseCtx   *hcl.EvalContext
+		want      cty.Value
+		diagCount int
+	}{
+		{
+			`
+function "greet" {
+  params = [name]
+  result = "Hello, ${name}."
+}
+`,
+			`greet("Ermintrude")`,
+			nil,
+			cty.StringVal("Hello, Ermintrude."),
+			0,
+		},
+		{
+			`
+function "greet" {
+  params = [name]
+  result = "Hello, ${name}."
+}
+`,
+			`greet()`,
+			nil,
+			cty.DynamicVal,
+			1, // missing value for "name"
+		},
+		{
+			`
+function "greet" {
+  params = [name]
+  result = "Hello, ${name}."
+}
+`,
+			`greet("Ermintrude", "extra")`,
+			nil,
+			cty.DynamicVal,
+			1, // too many arguments
+		},
+		{
+			`
+function "add" {
+  params = [a, b]
+  result = a + b
+}
+`,
+			`add(1, 5)`,
+			nil,
+			cty.NumberIntVal(6),
+			0,
+		},
+		{
+			`
+function "argstuple" {
+  params = []
+  variadic_param = args
+  result = args
+}
+`,
+			`argstuple("a", true, 1)`,
+			nil,
+			cty.TupleVal([]cty.Value{cty.StringVal("a"), cty.True, cty.NumberIntVal(1)}),
+			0,
+		},
+		{
+			`
+function "missing_var" {
+  params = []
+  result = nonexist
+}
+`,
+			`missing_var()`,
+			nil,
+			cty.DynamicVal,
+			1, // no variable named "nonexist"
+		},
+		{
+			`
+function "closure" {
+  params = []
+  result = upvalue
+}
+`,
+			`closure()`,
+			&hcl.EvalContext{
+				Variables: map[string]cty.Value{
+					"upvalue": cty.True,
+				},
+			},
+			cty.True,
+			0,
+		},
+		{
+			`
+function "neg" {
+  params = [val]
+  result = -val
+}
+function "add" {
+  params = [a, b]
+  result = a + b
+}
+`,
+			`neg(add(1, 3))`,
+			nil,
+			cty.NumberIntVal(-4),
+			0,
+		},
+		{
+			`
+function "neg" {
+  parrams = [val]
+  result = -val
+}
+`,
+			`null`,
+			nil,
+			cty.NullVal(cty.DynamicPseudoType),
+			2, // missing attribute "params", and unknown attribute "parrams"
+		},
+	}
+
+	for i, test := range tests {
+		t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) {
+			f, diags := hclsyntax.ParseConfig([]byte(test.src), "config", hcl.Pos{Line: 1, Column: 1})
+			if f == nil || f.Body == nil {
+				t.Fatalf("got nil file or body")
+			}
+
+			funcs, _, funcsDiags := decodeUserFunctions(f.Body, "function", func() *hcl.EvalContext {
+				return test.baseCtx
+			})
+			diags = append(diags, funcsDiags...)
+
+			expr, exprParseDiags := hclsyntax.ParseExpression([]byte(test.testExpr), "testexpr", hcl.Pos{Line: 1, Column: 1})
+			diags = append(diags, exprParseDiags...)
+			if expr == nil {
+				t.Fatalf("parsing test expr returned nil")
+			}
+
+			got, exprDiags := expr.Value(&hcl.EvalContext{
+				Functions: funcs,
+			})
+			diags = append(diags, exprDiags...)
+
+			if len(diags) != test.diagCount {
+				t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.diagCount)
+				for _, diag := range diags {
+					t.Logf("- %s", diag)
+				}
+			}
+
+			if !got.RawEquals(test.want) {
+				t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, test.want)
+			}
+		})
+	}
+}
diff --git a/ext/userfunc/doc.go b/ext/userfunc/doc.go
new file mode 100644
index 0000000..e4461d4
--- /dev/null
+++ b/ext/userfunc/doc.go
@@ -0,0 +1,22 @@
+// Package userfunc implements an HCL extension that allows user-defined
+// functions in HCL configuration.
+//
+// Using this extension requires some integration effort on the part of the
+// calling application, to pass any declared functions into an HCL evaluation
+// context after processing.
+//
+// The function declaration syntax looks like this:
+//
+//     function "foo" {
+//       params = ["name"]
+//       result = "Hello, ${name}!"
+//     }
+//
+// When a user-defined function is called, the expression given for the "result"
+// attribute is evaluated in an isolated evaluation context that defines variables
+// named after the given parameter names.
+//
+// The block name "function" may be overridden by the calling application, if
+// that default name conflicts with an existing block or attribute name in
+// the application.
+package userfunc
diff --git a/ext/userfunc/public.go b/ext/userfunc/public.go
new file mode 100644
index 0000000..5415c8c
--- /dev/null
+++ b/ext/userfunc/public.go
@@ -0,0 +1,42 @@
+package userfunc
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty/function"
+)
+
+// A ContextFunc is a callback used to produce the base EvalContext for
+// running a particular set of functions.
+//
+// This is a function rather than an EvalContext directly to allow functions
+// to be decoded before their context is complete. This will be true, for
+// example, for applications that wish to allow functions to refer to themselves.
+//
+// The simplest use of a ContextFunc is to give user functions access to the
+// same global variables and functions available elsewhere in an application's
+// configuration language, but more complex applications may use different
+// contexts to support lexical scoping depending on where in a configuration
+// structure a function declaration is found, etc.
+type ContextFunc func() *hcl.EvalContext
+
+// DecodeUserFunctions looks for blocks of the given type in the given body
+// and, for each one found, interprets it as a custom function definition.
+//
+// On success, the result is a mapping of function names to implementations,
+// along with a new body that represents the remaining content of the given
+// body which can be used for further processing.
+//
+// The result expression of each function is parsed during decoding but not
+// evaluated until the function is called.
+//
+// If the given ContextFunc is non-nil, it will be called to obtain the
+// context in which the function result expressions will be evaluated. If nil,
+// or if it returns nil, the result expression will have access only to
+// variables named after the declared parameters. A non-nil context turns
+// the returned functions into closures, bound to the given context.
+//
+// If the returned diagnostics set has errors then the function map and
+// remain body may be nil or incomplete.
+func DecodeUserFunctions(body hcl.Body, blockType string, context ContextFunc) (funcs map[string]function.Function, remain hcl.Body, diags hcl.Diagnostics) {
+	return decodeUserFunctions(body, blockType, context)
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..8f7c79f
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,29 @@
+module github.com/hashicorp/hcl/v2
+
+go 1.18
+
+require (
+	github.com/agext/levenshtein v1.2.1
+	github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3
+	github.com/apparentlymart/go-textseg/v13 v13.0.0
+	github.com/davecgh/go-spew v1.1.1
+	github.com/go-test/deep v1.0.3
+	github.com/google/go-cmp v0.3.1
+	github.com/kr/pretty v0.1.0
+	github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348
+	github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7
+	github.com/sergi/go-diff v1.0.0
+	github.com/spf13/pflag v1.0.2
+	github.com/zclconf/go-cty v1.8.0
+	github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b
+	golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167
+)
+
+require (
+	github.com/kr/text v0.1.0 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
+	github.com/stretchr/testify v1.2.2 // indirect
+	golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1 // indirect
+	golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 // indirect
+	golang.org/x/text v0.3.6 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..2d2ddb3
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,66 @@
+github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8=
+github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
+github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3 h1:ZSTrOEhiM5J5RFxEaFvMZVEAM1KvT1YzbEOwB2EAGjA=
+github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM=
+github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0=
+github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk=
+github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
+github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
+github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
+github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4=
+github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
+github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM=
+github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
+github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
+github.com/spf13/pflag v1.0.2 h1:Fy0orTDgHdbnzHcsOgfCN4LtHf0ec3wwtiwJqwvf3Gc=
+github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
+github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
+github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4=
+github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
+github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8=
+github.com/zclconf/go-cty v1.8.0 h1:s4AvqaeQzJIu3ndv4gVIhplVD0krU+bgrcLSVUnaWuA=
+github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk=
+github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b h1:FosyBZYxY34Wul7O/MSKey3txpPYyCqVO5ZyceuQJEI=
+github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167 h1:O8uGbHCqlTp2P6QJSLmCojM4mN6UemYv8K+dCnmHmu0=
+golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
+golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1 h1:SrN+KX8Art/Sf4HNj6Zcz06G7VEz+7w9tdXTPOZ7+l4=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
diff --git a/gohcl/decode.go b/gohcl/decode.go
new file mode 100644
index 0000000..2954f4c
--- /dev/null
+++ b/gohcl/decode.go
@@ -0,0 +1,320 @@
+package gohcl
+
+import (
+	"fmt"
+	"reflect"
+
+	"github.com/zclconf/go-cty/cty"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty/convert"
+	"github.com/zclconf/go-cty/cty/gocty"
+)
+
+// DecodeBody extracts the configuration within the given body into the given
+// value. This value must be a non-nil pointer to either a struct or
+// a map, where in the former case the configuration will be decoded using
+// struct tags and in the latter case only attributes are allowed and their
+// values are decoded into the map.
+//
+// The given EvalContext is used to resolve any variables or functions in
+// expressions encountered while decoding. This may be nil to require only
+// constant values, for simple applications that do not support variables or
+// functions.
+//
+// The returned diagnostics should be inspected with its HasErrors method to
+// determine if the populated value is valid and complete. If error diagnostics
+// are returned then the given value may have been partially-populated but
+// may still be accessed by a careful caller for static analysis and editor
+// integration use-cases.
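+//
+// A minimal illustrative use (the struct, field, and variable names here are
+// examples only, not part of this package):
+//
+//     type Config struct {
+//         IOMode string `hcl:"io_mode"`
+//     }
+//     var c Config
+//     diags := gohcl.DecodeBody(f.Body, nil, &c)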
+func DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
+	rv := reflect.ValueOf(val)
+	if rv.Kind() != reflect.Ptr {
+		panic(fmt.Sprintf("target value must be a pointer, not %s", rv.Type().String()))
+	}
+
+	return decodeBodyToValue(body, ctx, rv.Elem())
+}
+
+func decodeBodyToValue(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics {
+	et := val.Type()
+	switch et.Kind() {
+	case reflect.Struct:
+		return decodeBodyToStruct(body, ctx, val)
+	case reflect.Map:
+		return decodeBodyToMap(body, ctx, val)
+	default:
+		panic(fmt.Sprintf("target value must be pointer to struct or map, not %s", et.String()))
+	}
+}
+
+func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics {
+	schema, partial := ImpliedBodySchema(val.Interface())
+
+	var content *hcl.BodyContent
+	var leftovers hcl.Body
+	var diags hcl.Diagnostics
+	if partial {
+		content, leftovers, diags = body.PartialContent(schema)
+	} else {
+		content, diags = body.Content(schema)
+	}
+	if content == nil {
+		return diags
+	}
+
+	tags := getFieldTags(val.Type())
+
+	if tags.Body != nil {
+		fieldIdx := *tags.Body
+		field := val.Type().Field(fieldIdx)
+		fieldV := val.Field(fieldIdx)
+		switch {
+		case bodyType.AssignableTo(field.Type):
+			fieldV.Set(reflect.ValueOf(body))
+
+		default:
+			diags = append(diags, decodeBodyToValue(body, ctx, fieldV)...)
+		}
+	}
+
+	if tags.Remain != nil {
+		fieldIdx := *tags.Remain
+		field := val.Type().Field(fieldIdx)
+		fieldV := val.Field(fieldIdx)
+		switch {
+		case bodyType.AssignableTo(field.Type):
+			fieldV.Set(reflect.ValueOf(leftovers))
+		case attrsType.AssignableTo(field.Type):
+			attrs, attrsDiags := leftovers.JustAttributes()
+			if len(attrsDiags) > 0 {
+				diags = append(diags, attrsDiags...)
+			}
+			fieldV.Set(reflect.ValueOf(attrs))
+		default:
+			diags = append(diags, decodeBodyToValue(leftovers, ctx, fieldV)...)
+		}
+	}
+
+	for name, fieldIdx := range tags.Attributes {
+		attr := content.Attributes[name]
+		field := val.Type().Field(fieldIdx)
+		fieldV := val.Field(fieldIdx)
+
+		if attr == nil {
+			if !exprType.AssignableTo(field.Type) {
+				continue
+			}
+
+			// As a special case, if the target is of type hcl.Expression then
+			// we'll assign an actual expression that evaluates to a cty null,
+			// so the caller can deal with it within the cty realm rather
+			// than within the Go realm.
+			synthExpr := hcl.StaticExpr(cty.NullVal(cty.DynamicPseudoType), body.MissingItemRange())
+			fieldV.Set(reflect.ValueOf(synthExpr))
+			continue
+		}
+
+		switch {
+		case attrType.AssignableTo(field.Type):
+			fieldV.Set(reflect.ValueOf(attr))
+		case exprType.AssignableTo(field.Type):
+			fieldV.Set(reflect.ValueOf(attr.Expr))
+		default:
+			diags = append(diags, DecodeExpression(
+				attr.Expr, ctx, fieldV.Addr().Interface(),
+			)...)
+		}
+	}
+
+	blocksByType := content.Blocks.ByType()
+
+	for typeName, fieldIdx := range tags.Blocks {
+		blocks := blocksByType[typeName]
+		field := val.Type().Field(fieldIdx)
+
+		ty := field.Type
+		isSlice := false
+		isPtr := false
+		if ty.Kind() == reflect.Slice {
+			isSlice = true
+			ty = ty.Elem()
+		}
+		if ty.Kind() == reflect.Ptr {
+			isPtr = true
+			ty = ty.Elem()
+		}
+
+		if len(blocks) > 1 && !isSlice {
+			diags = append(diags, &hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  fmt.Sprintf("Duplicate %s block", typeName),
+				Detail: fmt.Sprintf(
+					"Only one %s block is allowed. Another was defined at %s.",
+					typeName, blocks[0].DefRange.String(),
+				),
+				Subject: &blocks[1].DefRange,
+			})
+			continue
+		}
+
+		if len(blocks) == 0 {
+			if isSlice || isPtr {
+				if val.Field(fieldIdx).IsNil() {
+					val.Field(fieldIdx).Set(reflect.Zero(field.Type))
+				}
+			} else {
+				diags = append(diags, &hcl.Diagnostic{
+					Severity: hcl.DiagError,
+					Summary:  fmt.Sprintf("Missing %s block", typeName),
+					Detail:   fmt.Sprintf("A %s block is required.", typeName),
+					Subject:  body.MissingItemRange().Ptr(),
+				})
+			}
+			continue
+		}
+
+		switch {
+
+		case isSlice:
+			elemType := ty
+			if isPtr {
+				elemType = reflect.PtrTo(ty)
+			}
+			sli := val.Field(fieldIdx)
+			if sli.IsNil() {
+				sli = reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks))
+			}
+
+			for i, block := range blocks {
+				if isPtr {
+					if i >= sli.Len() {
+						sli = reflect.Append(sli, reflect.New(ty))
+					}
+					v := sli.Index(i)
+					if v.IsNil() {
+						v = reflect.New(ty)
+					}
+					diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
+					sli.Index(i).Set(v)
+				} else {
+					if i >= sli.Len() {
+						sli = reflect.Append(sli, reflect.Indirect(reflect.New(ty)))
+					}
+					diags = append(diags, decodeBlockToValue(block, ctx, sli.Index(i))...)
+				}
+			}
+
+			if sli.Len() > len(blocks) {
+				sli.SetLen(len(blocks))
+			}
+
+			val.Field(fieldIdx).Set(sli)
+
+		default:
+			block := blocks[0]
+			if isPtr {
+				v := val.Field(fieldIdx)
+				if v.IsNil() {
+					v = reflect.New(ty)
+				}
+				diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
+				val.Field(fieldIdx).Set(v)
+			} else {
+				diags = append(diags, decodeBlockToValue(block, ctx, val.Field(fieldIdx))...)
+			}
+
+		}
+
+	}
+
+	return diags
+}
+
+func decodeBodyToMap(body hcl.Body, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics {
+	attrs, diags := body.JustAttributes()
+	if attrs == nil {
+		return diags
+	}
+
+	mv := reflect.MakeMap(v.Type())
+
+	for k, attr := range attrs {
+		switch {
+		case attrType.AssignableTo(v.Type().Elem()):
+			mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr))
+		case exprType.AssignableTo(v.Type().Elem()):
+			mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr.Expr))
+		default:
+			ev := reflect.New(v.Type().Elem())
+			diags = append(diags, DecodeExpression(attr.Expr, ctx, ev.Interface())...)
+			mv.SetMapIndex(reflect.ValueOf(k), ev.Elem())
+		}
+	}
+
+	v.Set(mv)
+
+	return diags
+}
+
+func decodeBlockToValue(block *hcl.Block, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics {
+	diags := decodeBodyToValue(block.Body, ctx, v)
+
+	if len(block.Labels) > 0 {
+		blockTags := getFieldTags(v.Type())
+		for li, lv := range block.Labels {
+			lfieldIdx := blockTags.Labels[li].FieldIndex
+			v.Field(lfieldIdx).Set(reflect.ValueOf(lv))
+		}
+	}
+
+	return diags
+}
+
+// DecodeExpression extracts the value of the given expression into the given
+// value. This value must be something that gocty is able to decode into,
+// since the final decoding is delegated to that package.
+//
+// The given EvalContext is used to resolve any variables or functions in
+// expressions encountered while decoding. This may be nil to require only
+// constant values, for simple applications that do not support variables or
+// functions.
+//
+// The returned diagnostics should be inspected with its HasErrors method to
+// determine if the populated value is valid and complete. If error diagnostics
+// are returned then the given value may have been partially-populated but
+// may still be accessed by a careful caller for static analysis and editor
+// integration use-cases.
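+//
+// A minimal illustrative use (the variable names here are examples only):
+//
+//     var port int
+//     diags := gohcl.DecodeExpression(attr.Expr, nil, &port)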
+func DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
+	srcVal, diags := expr.Value(ctx)
+
+	convTy, err := gocty.ImpliedType(val)
+	if err != nil {
+		panic(fmt.Sprintf("unsuitable DecodeExpression target: %s", err))
+	}
+
+	srcVal, err = convert.Convert(srcVal, convTy)
+	if err != nil {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Unsuitable value type",
+			Detail:   fmt.Sprintf("Unsuitable value: %s", err.Error()),
+			Subject:  expr.StartRange().Ptr(),
+			Context:  expr.Range().Ptr(),
+		})
+		return diags
+	}
+
+	err = gocty.FromCtyValue(srcVal, val)
+	if err != nil {
+		diags = append(diags, &hcl.Diagnostic{
+			Severity: hcl.DiagError,
+			Summary:  "Unsuitable value type",
+			Detail:   fmt.Sprintf("Unsuitable value: %s", err.Error()),
+			Subject:  expr.StartRange().Ptr(),
+			Context:  expr.Range().Ptr(),
+		})
+	}
+
+	return diags
+}
diff --git a/gohcl/decode_test.go b/gohcl/decode_test.go
new file mode 100644
index 0000000..50eaea7
--- /dev/null
+++ b/gohcl/decode_test.go
@@ -0,0 +1,810 @@
+package gohcl
+
+import (
+	"encoding/json"
+	"fmt"
+	"reflect"
+	"testing"
+
+	"github.com/davecgh/go-spew/spew"
+	"github.com/hashicorp/hcl/v2"
+	hclJSON "github.com/hashicorp/hcl/v2/json"
+	"github.com/zclconf/go-cty/cty"
+)
+
+func TestDecodeBody(t *testing.T) {
+	deepEquals := func(other interface{}) func(v interface{}) bool {
+		return func(v interface{}) bool {
+			return reflect.DeepEqual(v, other)
+		}
+	}
+
+	type withNameExpression struct {
+		Name hcl.Expression `hcl:"name"`
+	}
+
+	type withTwoAttributes struct {
+		A string `hcl:"a,optional"`
+		B string `hcl:"b,optional"`
+	}
+
+	type withNestedBlock struct {
+		Plain  string             `hcl:"plain,optional"`
+		Nested *withTwoAttributes `hcl:"nested,block"`
+	}
+
+	type withListofNestedBlocks struct {
+		Nested []*withTwoAttributes `hcl:"nested,block"`
+	}
+
+	type withListofNestedBlocksNoPointers struct {
+		Nested []withTwoAttributes `hcl:"nested,block"`
+	}
+
+	tests := []struct {
+		Body      map[string]interface{}
+		Target    func() interface{}
+		Check     func(v interface{}) bool
+		DiagCount int
+	}{
+		{
+			map[string]interface{}{},
+			makeInstantiateType(struct{}{}),
+			deepEquals(struct{}{}),
+			0,
+		},
+		{
+			map[string]interface{}{},
+			makeInstantiateType(struct {
+				Name string `hcl:"name"`
+			}{}),
+			deepEquals(struct {
+				Name string `hcl:"name"`
+			}{}),
+			1, // name is required
+		},
+		{
+			map[string]interface{}{},
+			makeInstantiateType(struct {
+				Name *string `hcl:"name"`
+			}{}),
+			deepEquals(struct {
+				Name *string `hcl:"name"`
+			}{}),
+			0,
+		}, // name nil
+		{
+			map[string]interface{}{},
+			makeInstantiateType(struct {
+				Name string `hcl:"name,optional"`
+			}{}),
+			deepEquals(struct {
+				Name string `hcl:"name,optional"`
+			}{}),
+			0,
+		}, // name optional
+		{
+			map[string]interface{}{},
+			makeInstantiateType(withNameExpression{}),
+			func(v interface{}) bool {
+				if v == nil {
+					return false
+				}
+
+				wne, valid := v.(withNameExpression)
+				if !valid {
+					return false
+				}
+
+				if wne.Name == nil {
+					return false
+				}
+
+				nameVal, _ := wne.Name.Value(nil)
+				if !nameVal.IsNull() {
+					return false
+				}
+
+				return true
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"name": "Ermintrude",
+			},
+			makeInstantiateType(withNameExpression{}),
+			func(v interface{}) bool {
+				if v == nil {
+					return false
+				}
+
+				wne, valid := v.(withNameExpression)
+				if !valid {
+					return false
+				}
+
+				if wne.Name == nil {
+					return false
+				}
+
+				nameVal, _ := wne.Name.Value(nil)
+				if !nameVal.Equals(cty.StringVal("Ermintrude")).True() {
+					return false
+				}
+
+				return true
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"name": "Ermintrude",
+			},
+			makeInstantiateType(struct {
+				Name string `hcl:"name"`
+			}{}),
+			deepEquals(struct {
+				Name string `hcl:"name"`
+			}{"Ermintrude"}),
+			0,
+		},
+		{
+			map[string]interface{}{
+				"name": "Ermintrude",
+				"age":  23,
+			},
+			makeInstantiateType(struct {
+				Name string `hcl:"name"`
+			}{}),
+			deepEquals(struct {
+				Name string `hcl:"name"`
+			}{"Ermintrude"}),
+			1, // Extraneous "age" property
+		},
+		{
+			map[string]interface{}{
+				"name": "Ermintrude",
+				"age":  50,
+			},
+			makeInstantiateType(struct {
+				Name  string         `hcl:"name"`
+				Attrs hcl.Attributes `hcl:",remain"`
+			}{}),
+			func(gotI interface{}) bool {
+				got := gotI.(struct {
+					Name  string         `hcl:"name"`
+					Attrs hcl.Attributes `hcl:",remain"`
+				})
+				return got.Name == "Ermintrude" && len(got.Attrs) == 1 && got.Attrs["age"] != nil
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"name": "Ermintrude",
+				"age":  50,
+			},
+			makeInstantiateType(struct {
+				Name   string   `hcl:"name"`
+				Remain hcl.Body `hcl:",remain"`
+			}{}),
+			func(gotI interface{}) bool {
+				got := gotI.(struct {
+					Name   string   `hcl:"name"`
+					Remain hcl.Body `hcl:",remain"`
+				})
+
+				attrs, _ := got.Remain.JustAttributes()
+
+				return got.Name == "Ermintrude" && len(attrs) == 1 && attrs["age"] != nil
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"name":   "Ermintrude",
+				"living": true,
+			},
+			makeInstantiateType(struct {
+				Name   string               `hcl:"name"`
+				Remain map[string]cty.Value `hcl:",remain"`
+			}{}),
+			deepEquals(struct {
+				Name   string               `hcl:"name"`
+				Remain map[string]cty.Value `hcl:",remain"`
+			}{
+				Name: "Ermintrude",
+				Remain: map[string]cty.Value{
+					"living": cty.True,
+				},
+			}),
+			0,
+		},
+		{
+			map[string]interface{}{
+				"name": "Ermintrude",
+				"age":  50,
+			},
+			makeInstantiateType(struct {
+				Name   string   `hcl:"name"`
+				Body   hcl.Body `hcl:",body"`
+				Remain hcl.Body `hcl:",remain"`
+			}{}),
+			func(gotI interface{}) bool {
+				got := gotI.(struct {
+					Name   string   `hcl:"name"`
+					Body   hcl.Body `hcl:",body"`
+					Remain hcl.Body `hcl:",remain"`
+				})
+
+				attrs, _ := got.Body.JustAttributes()
+
+				return got.Name == "Ermintrude" && len(attrs) == 2 &&
+					attrs["name"] != nil && attrs["age"] != nil
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": map[string]interface{}{},
+			},
+			makeInstantiateType(struct {
+				Noodle struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				// Generating no diagnostics is good enough for this one.
+				return true
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": []map[string]interface{}{{}},
+			},
+			makeInstantiateType(struct {
+				Noodle struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				// Generating no diagnostics is good enough for this one.
+				return true
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": []map[string]interface{}{{}, {}},
+			},
+			makeInstantiateType(struct {
+				Noodle struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				// Generating one diagnostic is good enough for this one.
+				return true
+			},
+			1,
+		},
+		{
+			map[string]interface{}{},
+			makeInstantiateType(struct {
+				Noodle struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				// Generating one diagnostic is good enough for this one.
+				return true
+			},
+			1,
+		},
+		{
+			map[string]interface{}{
+				"noodle": []map[string]interface{}{},
+			},
+			makeInstantiateType(struct {
+				Noodle struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				// Generating one diagnostic is good enough for this one.
+				return true
+			},
+			1,
+		},
+		{
+			map[string]interface{}{
+				"noodle": map[string]interface{}{},
+			},
+			makeInstantiateType(struct {
+				Noodle *struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				return gotI.(struct {
+					Noodle *struct{} `hcl:"noodle,block"`
+				}).Noodle != nil
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": []map[string]interface{}{{}},
+			},
+			makeInstantiateType(struct {
+				Noodle *struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				return gotI.(struct {
+					Noodle *struct{} `hcl:"noodle,block"`
+				}).Noodle != nil
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": []map[string]interface{}{},
+			},
+			makeInstantiateType(struct {
+				Noodle *struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				return gotI.(struct {
+					Noodle *struct{} `hcl:"noodle,block"`
+				}).Noodle == nil
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": []map[string]interface{}{{}, {}},
+			},
+			makeInstantiateType(struct {
+				Noodle *struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				// Generating one diagnostic is good enough for this one.
+				return true
+			},
+			1,
+		},
+		{
+			map[string]interface{}{
+				"noodle": []map[string]interface{}{},
+			},
+			makeInstantiateType(struct {
+				Noodle []struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				noodle := gotI.(struct {
+					Noodle []struct{} `hcl:"noodle,block"`
+				}).Noodle
+				return len(noodle) == 0
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": []map[string]interface{}{{}},
+			},
+			makeInstantiateType(struct {
+				Noodle []struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				noodle := gotI.(struct {
+					Noodle []struct{} `hcl:"noodle,block"`
+				}).Noodle
+				return len(noodle) == 1
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": []map[string]interface{}{{}, {}},
+			},
+			makeInstantiateType(struct {
+				Noodle []struct{} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				noodle := gotI.(struct {
+					Noodle []struct{} `hcl:"noodle,block"`
+				}).Noodle
+				return len(noodle) == 2
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": map[string]interface{}{},
+			},
+			makeInstantiateType(struct {
+				Noodle struct {
+					Name string `hcl:"name,label"`
+				} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				// Generating two diagnostics is good enough for this one.
+				// (one for the missing noodle block and the other for
+				// the JSON serialization detecting the missing level of
+				// hierarchy for the label.)
+				return true
+			},
+			2,
+		},
+		{
+			map[string]interface{}{
+				"noodle": map[string]interface{}{
+					"foo_foo": map[string]interface{}{},
+				},
+			},
+			makeInstantiateType(struct {
+				Noodle struct {
+					Name string `hcl:"name,label"`
+				} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				noodle := gotI.(struct {
+					Noodle struct {
+						Name string `hcl:"name,label"`
+					} `hcl:"noodle,block"`
+				}).Noodle
+				return noodle.Name == "foo_foo"
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": map[string]interface{}{
+					"foo_foo": map[string]interface{}{},
+					"bar_baz": map[string]interface{}{},
+				},
+			},
+			makeInstantiateType(struct {
+				Noodle struct {
+					Name string `hcl:"name,label"`
+				} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				// One diagnostic is enough for this one.
+				return true
+			},
+			1,
+		},
+		{
+			map[string]interface{}{
+				"noodle": map[string]interface{}{
+					"foo_foo": map[string]interface{}{},
+					"bar_baz": map[string]interface{}{},
+				},
+			},
+			makeInstantiateType(struct {
+				Noodles []struct {
+					Name string `hcl:"name,label"`
+				} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				noodles := gotI.(struct {
+					Noodles []struct {
+						Name string `hcl:"name,label"`
+					} `hcl:"noodle,block"`
+				}).Noodles
+				return len(noodles) == 2 && (noodles[0].Name == "foo_foo" || noodles[0].Name == "bar_baz") && (noodles[1].Name == "foo_foo" || noodles[1].Name == "bar_baz") && noodles[0].Name != noodles[1].Name
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"noodle": map[string]interface{}{
+					"foo_foo": map[string]interface{}{
+						"type": "rice",
+					},
+				},
+			},
+			makeInstantiateType(struct {
+				Noodle struct {
+					Name string `hcl:"name,label"`
+					Type string `hcl:"type"`
+				} `hcl:"noodle,block"`
+			}{}),
+			func(gotI interface{}) bool {
+				noodle := gotI.(struct {
+					Noodle struct {
+						Name string `hcl:"name,label"`
+						Type string `hcl:"type"`
+					} `hcl:"noodle,block"`
+				}).Noodle
+				return noodle.Name == "foo_foo" && noodle.Type == "rice"
+			},
+			0,
+		},
+
+		{
+			map[string]interface{}{
+				"name": "Ermintrude",
+				"age":  34,
+			},
+			makeInstantiateType(map[string]string(nil)),
+			deepEquals(map[string]string{
+				"name": "Ermintrude",
+				"age":  "34",
+			}),
+			0,
+		},
+		{
+			map[string]interface{}{
+				"name": "Ermintrude",
+				"age":  89,
+			},
+			makeInstantiateType(map[string]*hcl.Attribute(nil)),
+			func(gotI interface{}) bool {
+				got := gotI.(map[string]*hcl.Attribute)
+				return len(got) == 2 && got["name"] != nil && got["age"] != nil
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"name": "Ermintrude",
+				"age":  13,
+			},
+			makeInstantiateType(map[string]hcl.Expression(nil)),
+			func(gotI interface{}) bool {
+				got := gotI.(map[string]hcl.Expression)
+				return len(got) == 2 && got["name"] != nil && got["age"] != nil
+			},
+			0,
+		},
+		{
+			map[string]interface{}{
+				"name":   "Ermintrude",
+				"living": true,
+			},
+			makeInstantiateType(map[string]cty.Value(nil)),
+			deepEquals(map[string]cty.Value{
+				"name":   cty.StringVal("Ermintrude"),
+				"living": cty.True,
+			}),
+			0,
+		},
+		{
+			// Retain "nested" block while decoding
+			map[string]interface{}{
+				"plain": "foo",
+			},
+			func() interface{} {
+				return &withNestedBlock{
+					Plain: "bar",
+					Nested: &withTwoAttributes{
+						A: "bar",
+					},
+				}
+			},
+			func(gotI interface{}) bool {
+				foo := gotI.(withNestedBlock)
+				return foo.Plain == "foo" && foo.Nested != nil && foo.Nested.A == "bar"
+			},
+			0,
+		},
+		{
+			// Retain values in "nested" block while decoding
+			map[string]interface{}{
+				"nested": map[string]interface{}{
+					"a": "foo",
+				},
+			},
+			func() interface{} {
+				return &withNestedBlock{
+					Nested: &withTwoAttributes{
+						B: "bar",
+					},
+				}
+			},
+			func(gotI interface{}) bool {
+				foo := gotI.(withNestedBlock)
+				return foo.Nested.A == "foo" && foo.Nested.B == "bar"
+			},
+			0,
+		},
+		{
+			// Retain values in "nested" block list while decoding
+			map[string]interface{}{
+				"nested": []map[string]interface{}{
+					{
+						"a": "foo",
+					},
+				},
+			},
+			func() interface{} {
+				return &withListofNestedBlocks{
+					Nested: []*withTwoAttributes{
+						&withTwoAttributes{
+							B: "bar",
+						},
+					},
+				}
+			},
+			func(gotI interface{}) bool {
+				n := gotI.(withListofNestedBlocks)
+				return n.Nested[0].A == "foo" && n.Nested[0].B == "bar"
+			},
+			0,
+		},
+		{
+			// Remove additional elements from the list while decoding nested blocks
+			map[string]interface{}{
+				"nested": []map[string]interface{}{
+					{
+						"a": "foo",
+					},
+				},
+			},
+			func() interface{} {
+				return &withListofNestedBlocks{
+					Nested: []*withTwoAttributes{
+						&withTwoAttributes{
+							B: "bar",
+						},
+						&withTwoAttributes{
+							B: "bar",
+						},
+					},
+				}
+			},
+			func(gotI interface{}) bool {
+				n := gotI.(withListofNestedBlocks)
+				return len(n.Nested) == 1
+			},
+			0,
+		},
+		{
+			// Make sure decoding value slices works the same as pointer slices.
+			map[string]interface{}{
+				"nested": []map[string]interface{}{
+					{
+						"b": "bar",
+					},
+					{
+						"b": "baz",
+					},
+				},
+			},
+			func() interface{} {
+				return &withListofNestedBlocksNoPointers{
+					Nested: []withTwoAttributes{
+						{
+							B: "foo",
+						},
+					},
+				}
+			},
+			func(gotI interface{}) bool {
+				n := gotI.(withListofNestedBlocksNoPointers)
+				return n.Nested[0].B == "bar" && len(n.Nested) == 2
+			},
+			0,
+		},
+	}
+
+	for i, test := range tests {
+		// For convenience here we're going to use the JSON parser
+		// to process the given body.
+		buf, err := json.Marshal(test.Body)
+		if err != nil {
+			t.Fatalf("error JSON-encoding body for test %d: %s", i, err)
+		}
+
+		t.Run(string(buf), func(t *testing.T) {
+			file, diags := hclJSON.Parse(buf, "test.json")
+			if len(diags) != 0 {
+				t.Fatalf("diagnostics while parsing: %s", diags.Error())
+			}
+
+			targetVal := reflect.ValueOf(test.Target())
+
+			diags = DecodeBody(file.Body, nil, targetVal.Interface())
+			if len(diags) != test.DiagCount {
+				t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.DiagCount)
+				for _, diag := range diags {
+					t.Logf(" - %s", diag.Error())
+				}
+			}
+			got := targetVal.Elem().Interface()
+			if !test.Check(got) {
+				t.Errorf("wrong result\ngot:  %s", spew.Sdump(got))
+			}
+		})
+	}
+
+}
+
+func TestDecodeExpression(t *testing.T) {
+	tests := []struct {
+		Value     cty.Value
+		Target    interface{}
+		Want      interface{}
+		DiagCount int
+	}{
+		{
+			cty.StringVal("hello"),
+			"",
+			"hello",
+			0,
+		},
+		{
+			cty.StringVal("hello"),
+			cty.NilVal,
+			cty.StringVal("hello"),
+			0,
+		},
+		{
+			cty.NumberIntVal(2),
+			"",
+			"2",
+			0,
+		},
+		{
+			cty.StringVal("true"),
+			false,
+			true,
+			0,
+		},
+		{
+			cty.NullVal(cty.String),
+			"",
+			"",
+			1, // null value is not allowed
+		},
+		{
+			cty.UnknownVal(cty.String),
+			"",
+			"",
+			1, // value must be known
+		},
+		{
+			cty.ListVal([]cty.Value{cty.True}),
+			false,
+			false,
+			1, // bool required
+		},
+	}
+
+	for i, test := range tests {
+		t.Run(fmt.Sprintf("%02d", i), func(t *testing.T) {
+			expr := &fixedExpression{test.Value}
+
+			targetVal := reflect.New(reflect.TypeOf(test.Target))
+
+			diags := DecodeExpression(expr, nil, targetVal.Interface())
+			if len(diags) != test.DiagCount {
+				t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.DiagCount)
+				for _, diag := range diags {
+					t.Logf(" - %s", diag.Error())
+				}
+			}
+			got := targetVal.Elem().Interface()
+			if !reflect.DeepEqual(got, test.Want) {
+				t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, test.Want)
+			}
+		})
+	}
+}
+
+type fixedExpression struct {
+	val cty.Value
+}
+
+func (e *fixedExpression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+	return e.val, nil
+}
+
+func (e *fixedExpression) Range() (r hcl.Range) {
+	return
+}
+func (e *fixedExpression) StartRange() (r hcl.Range) {
+	return
+}
+
+func (e *fixedExpression) Variables() []hcl.Traversal {
+	return nil
+}
+
+func makeInstantiateType(target interface{}) func() interface{} {
+	return func() interface{} {
+		return reflect.New(reflect.TypeOf(target)).Interface()
+	}
+}
diff --git a/gohcl/doc.go b/gohcl/doc.go
new file mode 100644
index 0000000..9dcd970
--- /dev/null
+++ b/gohcl/doc.go
@@ -0,0 +1,62 @@
+// Package gohcl allows decoding HCL configurations into Go data structures.
+//
+// It provides a convenient and concise way of describing the schema for
+// configuration and then accessing the resulting data via native Go
+// types.
+//
+// A struct field tag scheme is used, similar to other decoding and
+// unmarshalling libraries. The tags are formatted as in the following example:
+//
+//    ThingType string `hcl:"thing_type,attr"`
+//
+// Within each tag there are two comma-separated tokens. The first is the
+// name of the corresponding construct in configuration, while the second
+// is a keyword giving the kind of construct expected. The following
+// kind keywords are supported:
+//
+//    attr (the default) indicates that the value is to be populated from an attribute
+//    block indicates that the value is to be populated from a block
+//    label indicates that the value is to be populated from a block label
+//    optional is the same as attr, but the field is optional
+//    remain indicates that the value is to be populated from the remaining body after populating other fields
+//
+// "attr" fields may either be of type hcl.Expression, in which case the raw
+// expression is assigned, or of any type accepted by gocty, in which case
+// gocty will be used to assign the value to a native Go type.
+//
+// "block" fields may be a struct that recursively uses the same tags, or a
+// slice of such structs, in which case multiple blocks of the corresponding
+// type are decoded into the slice.
+//
+// "body" can be placed on a single field of type hcl.Body to capture
+// the full hcl.Body that was decoded for a block. This does not allow leftover
+// values like "remain", so a decoding error will still be returned if leftover
+// fields are given. If you want to capture the decoded body PLUS leftover
+// fields, you must specify a "remain" field as well to prevent errors. The
+// body field and the remain field will both contain the leftover fields.
+//
+// "label" fields are considered only in a struct used as the type of a field
+// marked as "block", and are used sequentially to capture the labels of
+// the blocks being decoded. In this case, the name token is used only as
+// an identifier for the label in diagnostic messages.
+//
+// "optional" fields behave like "attr" fields, but they are optional
+// and will not give parsing errors if they are missing.
+//
+// "remain" can be placed on a single field that may be either of type
+// hcl.Body or hcl.Attributes, in which case any remaining body content is
+// placed into this field for delayed processing. If no "remain" field is
+// present then any attributes or blocks not matched by another valid tag
+// will cause an error diagnostic.
+//
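+// As an illustrative sketch (the type and field names below are examples
+// only, not part of this package's API), these tag kinds combine like this:
+//
+//    type Service struct {
+//        Kind    string   `hcl:"kind,label"`
+//        Name    string   `hcl:"name,label"`
+//        Command []string `hcl:"command"`
+//        Port    int      `hcl:"port,optional"`
+//        Remain  hcl.Body `hcl:",remain"`
+//    }
+//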
+// Only a subset of this tagging/typing vocabulary is supported for the
+// "Encode" family of functions. See the EncodeIntoBody docs for full details
+// on the constraints there.
+//
+// Broadly-speaking this package deals with two types of error. The first is
+// errors in the configuration itself, which are returned as diagnostics
+// written with the configuration author as the target audience. The second
+// is bugs in the calling program, such as invalid struct tags, which are
+// surfaced via panics since there can be no useful runtime handling of such
+// errors and they should certainly not be returned to the user as diagnostics.
+package gohcl
diff --git a/gohcl/encode.go b/gohcl/encode.go
new file mode 100644
index 0000000..d612e09
--- /dev/null
+++ b/gohcl/encode.go
@@ -0,0 +1,191 @@
+package gohcl
+
+import (
+	"fmt"
+	"reflect"
+	"sort"
+
+	"github.com/hashicorp/hcl/v2/hclwrite"
+	"github.com/zclconf/go-cty/cty/gocty"
+)
+
+// EncodeIntoBody replaces the contents of the given hclwrite Body with
+// attributes and blocks derived from the given value, which must be a
+// struct value or a pointer to a struct value with the struct tags defined
+// in this package.
+//
+// This function can work only with fully-decoded data. It will ignore any
+// fields tagged as "remain", any fields that decode attributes into either
+// hcl.Attribute or hcl.Expression values, and any fields that decode blocks
+// into hcl.Attributes values. This function does not have enough information
+// to complete the decoding of these types.
+//
+// Any fields tagged as "label" are ignored by this function. Use EncodeAsBlock
+// to produce a whole hclwrite.Block including block labels.
+//
+// As long as a suitable value is given to encode and the destination body
+// is non-nil, this function will always complete. It will panic in case of
+// any errors in the calling program, such as passing an inappropriate type
+// or a nil body.
+//
+// The layout of the resulting HCL source is derived from the ordering of
+// the struct fields, with blank lines around nested blocks of different types.
+// Fields representing attributes should usually precede those representing
+// blocks so that the attributes can group together in the result. For more
+// control, use the hclwrite API directly.
+func EncodeIntoBody(val interface{}, dst *hclwrite.Body) {
+	rv := reflect.ValueOf(val)
+	ty := rv.Type()
+	if ty.Kind() == reflect.Ptr {
+		rv = rv.Elem()
+		ty = rv.Type()
+	}
+	if ty.Kind() != reflect.Struct {
+		panic(fmt.Sprintf("value is %s, not struct", ty.Kind()))
+	}
+
+	tags := getFieldTags(ty)
+	populateBody(rv, ty, tags, dst)
+}
+
+// EncodeAsBlock creates a new hclwrite.Block populated with the data from
+// the given value, which must be a struct or pointer to struct with the
+// struct tags defined in this package.
+//
+// If the given struct type has fields tagged with "label" tags then they
+// will be used in order to annotate the created block with labels.
+//
+// This function has the same constraints as EncodeIntoBody and will panic
+// if they are violated.
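+//
+// A minimal sketch of typical usage, where svc is a value of a suitably
+// tagged struct type and f is an *hclwrite.File (both illustrative):
+//
+//    block := EncodeAsBlock(&svc, "service")
+//    f.Body().AppendBlock(block)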
+func EncodeAsBlock(val interface{}, blockType string) *hclwrite.Block {
+	rv := reflect.ValueOf(val)
+	ty := rv.Type()
+	if ty.Kind() == reflect.Ptr {
+		rv = rv.Elem()
+		ty = rv.Type()
+	}
+	if ty.Kind() != reflect.Struct {
+		panic(fmt.Sprintf("value is %s, not struct", ty.Kind()))
+	}
+
+	tags := getFieldTags(ty)
+	labels := make([]string, len(tags.Labels))
+	for i, lf := range tags.Labels {
+		lv := rv.Field(lf.FieldIndex)
+		// We just stringify whatever we find. It should always be a string
+		// but if not then we'll still do something reasonable.
+		labels[i] = fmt.Sprintf("%s", lv.Interface())
+	}
+
+	block := hclwrite.NewBlock(blockType, labels)
+	populateBody(rv, ty, tags, block.Body())
+	return block
+}
+
+func populateBody(rv reflect.Value, ty reflect.Type, tags *fieldTags, dst *hclwrite.Body) {
+	nameIdxs := make(map[string]int, len(tags.Attributes)+len(tags.Blocks))
+	namesOrder := make([]string, 0, len(tags.Attributes)+len(tags.Blocks))
+	for n, i := range tags.Attributes {
+		nameIdxs[n] = i
+		namesOrder = append(namesOrder, n)
+	}
+	for n, i := range tags.Blocks {
+		nameIdxs[n] = i
+		namesOrder = append(namesOrder, n)
+	}
+	sort.SliceStable(namesOrder, func(i, j int) bool {
+		ni, nj := namesOrder[i], namesOrder[j]
+		return nameIdxs[ni] < nameIdxs[nj]
+	})
+
+	dst.Clear()
+
+	prevWasBlock := false
+	for _, name := range namesOrder {
+		fieldIdx := nameIdxs[name]
+		field := ty.Field(fieldIdx)
+		fieldTy := field.Type
+		fieldVal := rv.Field(fieldIdx)
+
+		if fieldTy.Kind() == reflect.Ptr {
+			fieldTy = fieldTy.Elem()
+			fieldVal = fieldVal.Elem()
+		}
+
+		if _, isAttr := tags.Attributes[name]; isAttr {
+
+			if exprType.AssignableTo(fieldTy) || attrType.AssignableTo(fieldTy) {
+				continue // ignore undecoded fields
+			}
+			if !fieldVal.IsValid() {
+				continue // ignore (field value is nil pointer)
+			}
+			if fieldTy.Kind() == reflect.Ptr && fieldVal.IsNil() {
+				continue // ignore
+			}
+			if prevWasBlock {
+				dst.AppendNewline()
+				prevWasBlock = false
+			}
+
+			valTy, err := gocty.ImpliedType(fieldVal.Interface())
+			if err != nil {
+				panic(fmt.Sprintf("cannot encode %T as HCL expression: %s", fieldVal.Interface(), err))
+			}
+
+			val, err := gocty.ToCtyValue(fieldVal.Interface(), valTy)
+			if err != nil {
+				// This should never happen, since we should always be able
+				// to decode into the implied type.
+				panic(fmt.Sprintf("failed to encode %T as %#v: %s", fieldVal.Interface(), valTy, err))
+			}
+
+			dst.SetAttributeValue(name, val)
+
+		} else { // must be a block, then
+			elemTy := fieldTy
+			isSeq := false
+			if elemTy.Kind() == reflect.Slice || elemTy.Kind() == reflect.Array {
+				isSeq = true
+				elemTy = elemTy.Elem()
+			}
+
+			if bodyType.AssignableTo(elemTy) || attrsType.AssignableTo(elemTy) {
+				continue // ignore undecoded fields
+			}
+			prevWasBlock = false
+
+			if isSeq {
+				l := fieldVal.Len()
+				for i := 0; i < l; i++ {
+					elemVal := fieldVal.Index(i)
+					if !elemVal.IsValid() {
+						continue // ignore (elem value is nil pointer)
+					}
+					if elemTy.Kind() == reflect.Ptr && elemVal.IsNil() {
+						continue // ignore
+					}
+					block := EncodeAsBlock(elemVal.Interface(), name)
+					if !prevWasBlock {
+						dst.AppendNewline()
+						prevWasBlock = true
+					}
+					dst.AppendBlock(block)
+				}
+			} else {
+				if !fieldVal.IsValid() {
+					continue // ignore (field value is nil pointer)
+				}
+				if elemTy.Kind() == reflect.Ptr && fieldVal.IsNil() {
+					continue // ignore
+				}
+				block := EncodeAsBlock(fieldVal.Interface(), name)
+				if !prevWasBlock {
+					dst.AppendNewline()
+					prevWasBlock = true
+				}
+				dst.AppendBlock(block)
+			}
+		}
+	}
+}
diff --git a/gohcl/encode_test.go b/gohcl/encode_test.go
new file mode 100644
index 0000000..9778df7
--- /dev/null
+++ b/gohcl/encode_test.go
@@ -0,0 +1,64 @@
+package gohcl_test
+
+import (
+	"fmt"
+
+	"github.com/hashicorp/hcl/v2/gohcl"
+	"github.com/hashicorp/hcl/v2/hclwrite"
+)
+
+func ExampleEncodeIntoBody() {
+	type Service struct {
+		Name string   `hcl:"name,label"`
+		Exe  []string `hcl:"executable"`
+	}
+	type Constraints struct {
+		OS   string `hcl:"os"`
+		Arch string `hcl:"arch"`
+	}
+	type App struct {
+		Name        string       `hcl:"name"`
+		Desc        string       `hcl:"description"`
+		Constraints *Constraints `hcl:"constraints,block"`
+		Services    []Service    `hcl:"service,block"`
+	}
+
+	app := App{
+		Name: "awesome-app",
+		Desc: "Such an awesome application",
+		Constraints: &Constraints{
+			OS:   "linux",
+			Arch: "amd64",
+		},
+		Services: []Service{
+			{
+				Name: "web",
+				Exe:  []string{"./web", "--listen=:8080"},
+			},
+			{
+				Name: "worker",
+				Exe:  []string{"./worker"},
+			},
+		},
+	}
+
+	f := hclwrite.NewEmptyFile()
+	gohcl.EncodeIntoBody(&app, f.Body())
+	fmt.Printf("%s", f.Bytes())
+
+	// Output:
+	// name        = "awesome-app"
+	// description = "Such an awesome application"
+	//
+	// constraints {
+	//   os   = "linux"
+	//   arch = "amd64"
+	// }
+	//
+	// service "web" {
+	//   executable = ["./web", "--listen=:8080"]
+	// }
+	// service "worker" {
+	//   executable = ["./worker"]
+	// }
+}
diff --git a/gohcl/schema.go b/gohcl/schema.go
new file mode 100644
index 0000000..df21cc4
--- /dev/null
+++ b/gohcl/schema.go
@@ -0,0 +1,181 @@
+package gohcl
+
+import (
+	"fmt"
+	"reflect"
+	"sort"
+	"strings"
+
+	"github.com/hashicorp/hcl/v2"
+)
+
+// ImpliedBodySchema produces a hcl.BodySchema derived from the type of the
+// given value, which must be a struct value or a pointer to one. If an
+// inappropriate value is passed, this function will panic.
+//
+// The second return argument indicates whether the given struct includes
+// a "remain" field, and thus the returned schema is non-exhaustive.
+//
+// This uses the tags on the fields of the struct to discover how each
+// field's value should be expressed within configuration. If an invalid
+// mapping is attempted, this function will panic.
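+//
+// A minimal sketch of typical usage, where body is an hcl.Body and the
+// Config struct type is illustrative:
+//
+//    schema, _ := ImpliedBodySchema(&Config{})
+//    content, diags := body.Content(schema)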
+func ImpliedBodySchema(val interface{}) (schema *hcl.BodySchema, partial bool) {
+	ty := reflect.TypeOf(val)
+
+	if ty.Kind() == reflect.Ptr {
+		ty = ty.Elem()
+	}
+
+	if ty.Kind() != reflect.Struct {
+		panic(fmt.Sprintf("given value must be struct, not %T", val))
+	}
+
+	var attrSchemas []hcl.AttributeSchema
+	var blockSchemas []hcl.BlockHeaderSchema
+
+	tags := getFieldTags(ty)
+
+	attrNames := make([]string, 0, len(tags.Attributes))
+	for n := range tags.Attributes {
+		attrNames = append(attrNames, n)
+	}
+	sort.Strings(attrNames)
+	for _, n := range attrNames {
+		idx := tags.Attributes[n]
+		optional := tags.Optional[n]
+		field := ty.Field(idx)
+
+		var required bool
+
+		switch {
+		case field.Type.AssignableTo(exprType):
+			// If we're decoding to hcl.Expression then absence can be
+			// indicated via a null value, so we don't specify that
+			// the field is required during decoding.
+			required = false
+		case field.Type.Kind() != reflect.Ptr && !optional:
+			required = true
+		default:
+			required = false
+		}
+
+		attrSchemas = append(attrSchemas, hcl.AttributeSchema{
+			Name:     n,
+			Required: required,
+		})
+	}
+
+	blockNames := make([]string, 0, len(tags.Blocks))
+	for n := range tags.Blocks {
+		blockNames = append(blockNames, n)
+	}
+	sort.Strings(blockNames)
+	for _, n := range blockNames {
+		idx := tags.Blocks[n]
+		field := ty.Field(idx)
+		fty := field.Type
+		if fty.Kind() == reflect.Slice {
+			fty = fty.Elem()
+		}
+		if fty.Kind() == reflect.Ptr {
+			fty = fty.Elem()
+		}
+		if fty.Kind() != reflect.Struct {
+			panic(fmt.Sprintf(
+				"hcl 'block' tag kind cannot be applied to %s field %s: struct required", field.Type.String(), field.Name,
+			))
+		}
+		ftags := getFieldTags(fty)
+		var labelNames []string
+		if len(ftags.Labels) > 0 {
+			labelNames = make([]string, len(ftags.Labels))
+			for i, l := range ftags.Labels {
+				labelNames[i] = l.Name
+			}
+		}
+
+		blockSchemas = append(blockSchemas, hcl.BlockHeaderSchema{
+			Type:       n,
+			LabelNames: labelNames,
+		})
+	}
+
+	partial = tags.Remain != nil
+	schema = &hcl.BodySchema{
+		Attributes: attrSchemas,
+		Blocks:     blockSchemas,
+	}
+	return schema, partial
+}
+
+type fieldTags struct {
+	Attributes map[string]int
+	Blocks     map[string]int
+	Labels     []labelField
+	Remain     *int
+	Body       *int
+	Optional   map[string]bool
+}
+
+type labelField struct {
+	FieldIndex int
+	Name       string
+}
+
+func getFieldTags(ty reflect.Type) *fieldTags {
+	ret := &fieldTags{
+		Attributes: map[string]int{},
+		Blocks:     map[string]int{},
+		Optional:   map[string]bool{},
+	}
+
+	ct := ty.NumField()
+	for i := 0; i < ct; i++ {
+		field := ty.Field(i)
+		tag := field.Tag.Get("hcl")
+		if tag == "" {
+			continue
+		}
+
+		comma := strings.Index(tag, ",")
+		var name, kind string
+		if comma != -1 {
+			name = tag[:comma]
+			kind = tag[comma+1:]
+		} else {
+			name = tag
+			kind = "attr"
+		}
+
+		switch kind {
+		case "attr":
+			ret.Attributes[name] = i
+		case "block":
+			ret.Blocks[name] = i
+		case "label":
+			ret.Labels = append(ret.Labels, labelField{
+				FieldIndex: i,
+				Name:       name,
+			})
+		case "remain":
+			if ret.Remain != nil {
+				panic("only one 'remain' tag is permitted")
+			}
+			idx := i // copy, because this loop will continue assigning to i
+			ret.Remain = &idx
+		case "body":
+			if ret.Body != nil {
+				panic("only one 'body' tag is permitted")
+			}
+			idx := i // copy, because this loop will continue assigning to i
+			ret.Body = &idx
+		case "optional":
+			ret.Attributes[name] = i
+			ret.Optional[name] = true
+		default:
+			panic(fmt.Sprintf("invalid hcl field tag kind %q on %s %q", kind, field.Type.String(), field.Name))
+		}
+	}
+
+	return ret
+}
diff --git a/gohcl/schema_test.go b/gohcl/schema_test.go
new file mode 100644
index 0000000..cea77ca
--- /dev/null
+++ b/gohcl/schema_test.go
@@ -0,0 +1,230 @@
+package gohcl
+
+import (
+	"fmt"
+	"reflect"
+	"testing"
+
+	"github.com/davecgh/go-spew/spew"
+	"github.com/hashicorp/hcl/v2"
+)
+
+func TestImpliedBodySchema(t *testing.T) {
+	tests := []struct {
+		val         interface{}
+		wantSchema  *hcl.BodySchema
+		wantPartial bool
+	}{
+		{
+			struct{}{},
+			&hcl.BodySchema{},
+			false,
+		},
+		{
+			struct {
+				Ignored bool
+			}{},
+			&hcl.BodySchema{},
+			false,
+		},
+		{
+			struct {
+				Attr1 bool `hcl:"attr1"`
+				Attr2 bool `hcl:"attr2"`
+			}{},
+			&hcl.BodySchema{
+				Attributes: []hcl.AttributeSchema{
+					{
+						Name:     "attr1",
+						Required: true,
+					},
+					{
+						Name:     "attr2",
+						Required: true,
+					},
+				},
+			},
+			false,
+		},
+		{
+			struct {
+				Attr *bool `hcl:"attr,attr"`
+			}{},
+			&hcl.BodySchema{
+				Attributes: []hcl.AttributeSchema{
+					{
+						Name:     "attr",
+						Required: false,
+					},
+				},
+			},
+			false,
+		},
+		{
+			struct {
+				Thing struct{} `hcl:"thing,block"`
+			}{},
+			&hcl.BodySchema{
+				Blocks: []hcl.BlockHeaderSchema{
+					{
+						Type: "thing",
+					},
+				},
+			},
+			false,
+		},
+		{
+			struct {
+				Thing struct {
+					Type string `hcl:"type,label"`
+					Name string `hcl:"name,label"`
+				} `hcl:"thing,block"`
+			}{},
+			&hcl.BodySchema{
+				Blocks: []hcl.BlockHeaderSchema{
+					{
+						Type:       "thing",
+						LabelNames: []string{"type", "name"},
+					},
+				},
+			},
+			false,
+		},
+		{
+			struct {
+				Thing []struct {
+					Type string `hcl:"type,label"`
+					Name string `hcl:"name,label"`
+				} `hcl:"thing,block"`
+			}{},
+			&hcl.BodySchema{
+				Blocks: []hcl.BlockHeaderSchema{
+					{
+						Type:       "thing",
+						LabelNames: []string{"type", "name"},
+					},
+				},
+			},
+			false,
+		},
+		{
+			struct {
+				Thing *struct {
+					Type string `hcl:"type,label"`
+					Name string `hcl:"name,label"`
+				} `hcl:"thing,block"`
+			}{},
+			&hcl.BodySchema{
+				Blocks: []hcl.BlockHeaderSchema{
+					{
+						Type:       "thing",
+						LabelNames: []string{"type", "name"},
+					},
+				},
+			},
+			false,
+		},
+		{
+			struct {
+				Thing struct {
+					Name      string `hcl:"name,label"`
+					Something string `hcl:"something"`
+				} `hcl:"thing,block"`
+			}{},
+			&hcl.BodySchema{
+				Blocks: []hcl.BlockHeaderSchema{
+					{
+						Type:       "thing",
+						LabelNames: []string{"name"},
+					},
+				},
+			},
+			false,
+		},
+		{
+			struct {
+				Doodad string `hcl:"doodad"`
+				Thing  struct {
+					Name string `hcl:"name,label"`
+				} `hcl:"thing,block"`
+			}{},
+			&hcl.BodySchema{
+				Attributes: []hcl.AttributeSchema{
+					{
+						Name:     "doodad",
+						Required: true,
+					},
+				},
+				Blocks: []hcl.BlockHeaderSchema{
+					{
+						Type:       "thing",
+						LabelNames: []string{"name"},
+					},
+				},
+			},
+			false,
+		},
+		{
+			struct {
+				Doodad string `hcl:"doodad"`
+				Config string `hcl:",remain"`
+			}{},
+			&hcl.BodySchema{
+				Attributes: []hcl.AttributeSchema{
+					{
+						Name:     "doodad",
+						Required: true,
+					},
+				},
+			},
+			true,
+		},
+		{
+			struct {
+				Expr hcl.Expression `hcl:"expr"`
+			}{},
+			&hcl.BodySchema{
+				Attributes: []hcl.AttributeSchema{
+					{
+						Name:     "expr",
+						Required: false,
+					},
+				},
+			},
+			false,
+		},
+		{
+			struct {
+				Meh string `hcl:"meh,optional"`
+			}{},
+			&hcl.BodySchema{
+				Attributes: []hcl.AttributeSchema{
+					{
+						Name:     "meh",
+						Required: false,
+					},
+				},
+			},
+			false,
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(fmt.Sprintf("%#v", test.val), func(t *testing.T) {
+			schema, partial := ImpliedBodySchema(test.val)
+			if !reflect.DeepEqual(schema, test.wantSchema) {
+				t.Errorf(
+					"wrong schema\ngot:  %s\nwant: %s",
+					spew.Sdump(schema), spew.Sdump(test.wantSchema),
+				)
+			}
+
+			if partial != test.wantPartial {
+				t.Errorf(
+					"wrong partial flag\ngot:  %#v\nwant: %#v",
+					partial, test.wantPartial,
+				)
+			}
+		})
+	}
+}
diff --git a/gohcl/types.go b/gohcl/types.go
new file mode 100644
index 0000000..a8d00f8
--- /dev/null
+++ b/gohcl/types.go
@@ -0,0 +1,16 @@
+package gohcl
+
+import (
+	"reflect"
+
+	"github.com/hashicorp/hcl/v2"
+)
+
+var victimExpr hcl.Expression
+var victimBody hcl.Body
+
+var exprType = reflect.TypeOf(&victimExpr).Elem()
+var bodyType = reflect.TypeOf(&victimBody).Elem()
+var blockType = reflect.TypeOf((*hcl.Block)(nil))
+var attrType = reflect.TypeOf((*hcl.Attribute)(nil))
+var attrsType = reflect.TypeOf(hcl.Attributes(nil))
diff --git a/guide/.gitignore b/guide/.gitignore
new file mode 100644
index 0000000..ced5893
--- /dev/null
+++ b/guide/.gitignore
@@ -0,0 +1,2 @@
+env/*
+_build/*
diff --git a/guide/Makefile b/guide/Makefile
new file mode 100644
index 0000000..01f3758
--- /dev/null
+++ b/guide/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+SPHINXPROJ    = HCL
+SOURCEDIR     = .
+BUILDDIR      = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/guide/conf.py b/guide/conf.py
new file mode 100644
index 0000000..099cdcd
--- /dev/null
+++ b/guide/conf.py
@@ -0,0 +1,157 @@
+import subprocess
+import os
+import os.path
+
+# -- Project information -----------------------------------------------------
+
+project = u'HCL'
+copyright = u'2018, HashiCorp'
+author = u'HashiCorp'
+
+if 'READTHEDOCS_VERSION' in os.environ:
+    version_str = os.environ['READTHEDOCS_VERSION']
+else:
+    version_str = subprocess.check_output(['git', 'describe', '--always']).strip()
+
+# The short X.Y version
+version = unicode(version_str)
+# The full version, including alpha/beta/rc tags
+release = unicode(version_str)
+
+
+# -- General configuration ---------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.todo',
+    'sphinx.ext.githubpages',
+    'sphinxcontrib.golangdomain',
+    'sphinx.ext.autodoc',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path .
+exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store', 'env']
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'alabaster'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#
+# html_theme_options = {}
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# The default sidebars (for documents that don't match any pattern) are
+# defined by theme itself.  Builtin themes are using these templates by
+# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
+# 'searchbox.html']``.
+#
+# html_sidebars = {}
+
+
+# -- Options for HTMLHelp output ---------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'HCLdoc'
+
+
+# -- Options for LaTeX output ------------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    #
+    # 'papersize': 'letterpaper',
+
+    # The font size ('10pt', '11pt' or '12pt').
+    #
+    # 'pointsize': '10pt',
+
+    # Additional stuff for the LaTeX preamble.
+    #
+    # 'preamble': '',
+
+    # Latex figure (float) alignment
+    #
+    # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    (master_doc, 'HCL.tex', u'HCL Documentation',
+     u'HashiCorp', 'manual'),
+]
+
+
+# -- Options for manual page output ------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (master_doc, 'hcl', u'HCL Documentation',
+     [author], 1)
+]
+
+
+# -- Options for Texinfo output ----------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+    (master_doc, 'HCL', u'HCL Documentation',
+     author, 'HCL', 'One line description of project.',
+     'Miscellaneous'),
+]
+
+
+# -- Extension configuration -------------------------------------------------
+
+# -- Options for todo extension ----------------------------------------------
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
diff --git a/guide/go.rst b/guide/go.rst
new file mode 100644
index 0000000..bd6cef1
--- /dev/null
+++ b/guide/go.rst
@@ -0,0 +1,31 @@
+Using HCL in a Go application
+=============================
+
+HCL is itself written in Go_ and currently it is primarily intended for use as
+a library within other Go programs.
+
+This section describes a number of different ways HCL can be used to define
+and process a configuration language within a Go program. For simple situations,
+HCL can decode directly into Go ``struct`` values, much as encoding packages
+such as ``encoding/json`` and ``encoding/xml`` do.
+
+However, the HCL Go API also offers some alternative approaches for processing
+languages that may be more complex or that include portions whose expected
+structure cannot be determined until runtime.
+
+The following sections give an overview of different ways HCL can be used in
+a Go program.
+
+.. toctree::
+   :maxdepth: 1
+   :caption: Sub-sections:
+
+   go_parsing
+   go_diagnostics
+   go_decoding_gohcl
+   go_decoding_hcldec
+   go_expression_eval
+   go_decoding_lowlevel
+   go_patterns
+
+.. _Go: https://golang.org/
diff --git a/guide/go_decoding_gohcl.rst b/guide/go_decoding_gohcl.rst
new file mode 100644
index 0000000..7ef2126
--- /dev/null
+++ b/guide/go_decoding_gohcl.rst
@@ -0,0 +1,130 @@
+.. go:package:: gohcl
+
+.. _go-decoding-gohcl:
+
+Decoding Into Native Go Values
+==============================
+
+The most straightforward way to access the content of an HCL file is to
+decode into native Go values using ``reflect``, similar to the technique used
+by packages like ``encoding/json`` and ``encoding/xml``.
+
+Package ``gohcl`` provides functions for this sort of decoding. Function
+``DecodeBody`` attempts to extract values from an HCL *body* and write them
+into a Go value given as a pointer:
+
+.. code-block:: go
+
+   type ServiceConfig struct {
+     Type       string `hcl:"type,label"`
+     Name       string `hcl:"name,label"`
+     ListenAddr string `hcl:"listen_addr"`
+   }
+   type Config struct {
+     IOMode   string          `hcl:"io_mode"`
+     Services []ServiceConfig `hcl:"service,block"`
+   }
+
+   var c Config
+   moreDiags := gohcl.DecodeBody(f.Body, nil, &c)
+   diags = append(diags, moreDiags...)
+
+The above example decodes the *root body* of a file ``f``, presumably loaded
+previously using a parser, into the variable ``c``. The field labels within
+the struct types imply the schema of the expected language, which is a cut-down
+version of the hypothetical language we showed in :ref:`intro`.
+
+The struct field labels consist of two comma-separated values. The first is
+the name of the corresponding argument or block type as it will appear in
+the input file, and the second is the type of element being named. If the
+second value is omitted, it defaults to ``attr``, requesting an attribute.
+
+Nested blocks are represented by a struct or a slice of that struct, and the
+special element type ``label`` within that struct declares that each instance
+of that block type must be followed by one or more block labels. In the above
+example, the ``service`` block type is defined to require two labels, named
+``type`` and ``name``. For label fields in particular, the given name is used
+only to refer to the particular label in error messages when the wrong number
+of labels is used.
+
+By default, all declared attributes and blocks are considered to be required.
+An optional value is indicated by making its field have a pointer type, in
+which case ``nil`` is written to indicate the absence of the argument.
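+
+For example, a minimal sketch where the hypothetical ``MaxConnections``
+argument is optional because its field has a pointer type:
+
+.. code-block:: go
+
+   type ServiceConfig struct {
+     Type           string `hcl:"type,label"`
+     Name           string `hcl:"name,label"`
+     ListenAddr     string `hcl:"listen_addr"`
+     MaxConnections *int   `hcl:"max_connections"`
+   }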
+
+The sections below discuss some additional decoding use-cases. For full details
+on the ``gohcl`` package, see
+`the godoc reference <https://godoc.org/github.com/hashicorp/hcl/v2/gohcl>`_.
+
+.. _go-decoding-gohcl-evalcontext:
+
+Variables and Functions
+-----------------------
+
+By default, arguments given in the configuration may use only literal values
+and the built-in expression language operators, such as arithmetic.
+
+The second argument to ``gohcl.DecodeBody``, shown as ``nil`` in the previous
+example, allows the calling application to additionally offer variables and
+functions for use in expressions. Its value is a pointer to an
+``hcl.EvalContext``, which will be covered in more detail in the later section
+:ref:`go-expression-eval`. For now, a simple example of making the id of the
+current process available as a single variable called ``pid``:
+
+.. code-block:: go
+
+   type Context struct {
+       Pid string
+   }
+   ctx := gohcl.EvalContext(&Context{
+       Pid: strconv.Itoa(os.Getpid()),
+   })
+   var c Config
+   moreDiags := gohcl.DecodeBody(f.Body, ctx, &c)
+   diags = append(diags, moreDiags...)
+
+``gohcl.EvalContext`` constructs an expression evaluation context from a Go
+struct value, making the fields available as variables and the methods
+available as functions. The field and method names are transformed so that
+each word (marked by its leading uppercase letter) becomes lowercase, with
+words separated by underscores; the ``Pid`` field above therefore becomes
+the variable ``pid``.
+
+.. code-block:: hcl
+
+   name = "example-program (${pid})"
+
+Partial Decoding
+----------------
+
+In the examples so far, we've extracted the content from the entire input file
+in a single call to ``DecodeBody``. This is sufficient for many simple
+situations, but sometimes different parts of the file must be evaluated
+separately. For example:
+
+* If different parts of the file must be evaluated with different variables
+  or functions available.
+
+* If the result of evaluating one part of the file is used to set variables
+  or functions in another part of the file.
+
+There are several ways to perform partial decoding with ``gohcl``, all of
+which involve decoding into HCL's own types, such as ``hcl.Body``.
+
+The most general approach is to declare an additional struct field of type
+``hcl.Body``, with the special field tag type ``remain``:
+
+.. code-block:: go
+
+   type ServiceConfig struct {
+     Type       string   `hcl:"type,label"`
+     Name       string   `hcl:"name,label"`
+     ListenAddr string   `hcl:"listen_addr"`
+     Remain     hcl.Body `hcl:",remain"`
+   }
+
+When a ``remain`` field is present, any element of the input body that is
+not matched is retained in a body saved into that field, which can then be
+decoded in a later call, potentially with a different evaluation context.
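+
+For example, a minimal sketch of a later decoding pass over each retained
+body, assuming the ``Config`` type from the earlier example now uses this
+``ServiceConfig``, and where ``ExtraConfig`` and ``laterCtx`` are
+hypothetical names defined by the calling application:
+
+.. code-block:: go
+
+   for _, sc := range c.Services {
+     var extra ExtraConfig
+     moreDiags := gohcl.DecodeBody(sc.Remain, laterCtx, &extra)
+     diags = append(diags, moreDiags...)
+   }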
+
+Another option is to decode an attribute into a value of type ``hcl.Expression``,
+which can then be evaluated separately as described in
+:ref:`go-expression-eval`.
diff --git a/guide/go_decoding_hcldec.rst b/guide/go_decoding_hcldec.rst
new file mode 100644
index 0000000..f05b926
--- /dev/null
+++ b/guide/go_decoding_hcldec.rst
@@ -0,0 +1,242 @@
+.. go:package:: hcldec
+
+.. _go-decoding-hcldec:
+
+Decoding With Dynamic Schema
+============================
+
+In section :ref:`go-decoding-gohcl`, we saw the most straightforward way to
+access the content from an HCL file, decoding directly into a Go value whose
+type is known at application compile time.
+
+For some applications, it is not possible to know the schema of the entire
+configuration when the application is built. For example, `HashiCorp Terraform`_
+uses HCL as the foundation of its configuration language, but parts of the
+configuration are handled by plugins loaded dynamically at runtime, and so
+the schemas for these portions cannot be encoded directly in the Terraform
+source code.
+
+HCL's ``hcldec`` package offers a different approach to decoding that allows
+schemas to be created at runtime, and the result to be decoded into
+dynamically-typed data structures.
+
+The sections below are an overview of the main parts of package ``hcldec``.
+For full details, see
+`the package godoc <https://godoc.org/github.com/hashicorp/hcl/v2/hcldec>`_.
+
+.. _`HashiCorp Terraform`: https://www.terraform.io/
+
+Decoder Specification
+---------------------
+
+Whereas :go:pkg:`gohcl` infers the expected schema by using reflection against
+the given value, ``hcldec`` obtains schema through a decoding *specification*,
+which is a set of instructions for mapping HCL constructs onto a dynamic
+data structure.
+
+The ``hcldec`` package contains a number of different specifications, each
+implementing :go:type:`hcldec.Spec` and having a ``Spec`` suffix on its name.
+Each spec has two distinct functions:
+
+* Adding zero or more validation constraints on the input configuration file.
+
+* Producing a result value based on some elements from the input file.
+
+The most common pattern is for the top-level spec to be a
+:go:type:`hcldec.ObjectSpec` with nested specifications defining either blocks
+or attributes, depending on whether the configuration file will be
+block-structured or flat.
+
+.. code-block:: go
+
+  spec := hcldec.ObjectSpec{
+      "io_mode": &hcldec.AttrSpec{
+          Name: "io_mode",
+          Type: cty.String,
+      },
+      "services": &hcldec.BlockMapSpec{
+          TypeName:   "service",
+          LabelNames: []string{"type", "name"},
+          Nested:     hcldec.ObjectSpec{
+              "listen_addr": &hcldec.AttrSpec{
+                  Name:     "listen_addr",
+                  Type:     cty.String,
+                  Required: true,
+              },
+              "processes": &hcldec.BlockMapSpec{
+                  TypeName:   "process",
+                  LabelNames: []string{"name"},
+                  Nested:     hcldec.ObjectSpec{
+                      "command": &hcldec.AttrSpec{
+                          Name:     "command",
+                          Type:     cty.List(cty.String),
+                          Required: true,
+                      },
+                  },
+              },
+          },
+      },
+  }
+  val, moreDiags := hcldec.Decode(f.Body, spec, nil)
+  diags = append(diags, moreDiags...)
+
+The above specification expects a configuration shaped like our example in
+:ref:`intro`, and calls for it to be decoded into a dynamic data structure
+that would have the following shape if serialized to JSON:
+
+.. code-block:: JSON
+
+  {
+    "io_mode": "async",
+    "services": {
+      "http": {
+        "web_proxy": {
+          "listen_addr": "127.0.0.1:8080",
+          "processes": {
+            "main": {
+              "command": ["/usr/local/bin/awesome-app", "server"]
+            },
+            "mgmt": {
+              "command": ["/usr/local/bin/awesome-app", "mgmt"]
+            }
+          }
+        }
+      }
+    }
+  }
+
+.. go:package:: cty
+
+Types and Values With ``cty``
+-----------------------------
+
+HCL's expression interpreter is implemented in terms of another library called
+:go:pkg:`cty`, which provides a type system which HCL builds on and a robust
+representation of dynamic values in that type system. You could think of
+:go:pkg:`cty` as being a bit like Go's own :go:pkg:`reflect`, but for the
+results of HCL expressions rather than Go programs.
+
+The full details of this system can be found in
+`its own repository <https://github.com/zclconf/go-cty>`_, but this section
+will cover the most important highlights, because ``hcldec`` specifications
+include :go:pkg:`cty` types (as seen in the above example) and its results are
+:go:pkg:`cty` values.
+
+``hcldec`` works directly with :go:pkg:`cty` — as opposed to converting values
+directly into Go native types — because the functionality of the :go:pkg:`cty`
+packages then allows further processing of those values without any loss of
+fidelity or range. For example, :go:pkg:`cty` defines a JSON encoding of its
+values that can be decoded losslessly as long as both sides agree on the value
+type that is expected, which is a useful capability in systems where some sort
+of RPC barrier separates the main program from its plugins.
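+
+For example, a minimal sketch of that round trip, where ``ctyjson`` is an
+import alias for ``github.com/zclconf/go-cty/cty/json``:
+
+.. code-block:: go
+
+   buf, err := ctyjson.Marshal(val, val.Type())
+   if err != nil {
+     // (handle the error)
+   }
+   // On the other side of the RPC barrier, decode with the agreed type:
+   // val, err := ctyjson.Unmarshal(buf, expectedType)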
+
+Types are instances of :go:type:`cty.Type`, and are constructed from functions
+and variables in :go:pkg:`cty` as shown in the above example, where the string
+attributes are typed as ``cty.String``, which is a primitive type, and the list
+attribute is typed as ``cty.List(cty.String)``, which constructs a new list
+type with string elements.
+
+Values are instances of :go:type:`cty.Value`, and can also be constructed from
+functions in :go:pkg:`cty`, using the functions that include ``Val`` in their
+names or using the operation methods available on :go:type:`cty.Value`.
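+
+For example, a small sketch of constructing a few values directly (the
+specific values here are arbitrary, for illustration only):
+
+.. code-block:: go
+
+   name := cty.StringVal("Ermintrude")
+   count := cty.NumberIntVal(3)
+   tags := cty.ListVal([]cty.Value{cty.StringVal("a"), cty.StringVal("b")})
+
+   // Operation methods on cty.Value produce new values.
+   total := count.Add(cty.NumberIntVal(1))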
+
+In most cases you will eventually want to use the resulting data as native Go
+types, to pass it to non-:go:pkg:`cty`-aware code. To do this, see the guides
+on
+`Converting between types <https://github.com/zclconf/go-cty/blob/master/docs/convert.md>`_
+(staying within :go:pkg:`cty`) and
+`Converting to and from native Go values <https://github.com/zclconf/go-cty/blob/master/docs/gocty.md>`_.
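+
+As a minimal sketch, the ``io_mode`` attribute from the ``hcldec`` result
+shown earlier could be converted into a native Go string using the ``gocty``
+package (error handling abbreviated):
+
+.. code-block:: go
+
+   // import "github.com/zclconf/go-cty/cty/gocty"
+
+   var ioMode string
+   if err := gocty.FromCtyValue(val.GetAttr("io_mode"), &ioMode); err != nil {
+       // the value was not convertible to a string
+   }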
+
+Partial Decoding
+----------------
+
+Because the ``hcldec`` result is always a value, the input is always entirely
+processed in a single call, unlike with :go:pkg:`gohcl`.
+
+However, both :go:pkg:`gohcl` and :go:pkg:`hcldec` take :go:type:`hcl.Body` as
+the representation of input, and so it is possible and common to mix them both
+in the same program.
+
+A common situation is that :go:pkg:`gohcl` is used in the main program to
+decode the top level of configuration, which then allows the main program to
+determine which plugins need to be loaded to process the leaf portions of
+configuration. In this case, the portions that will be interpreted by plugins
+are retained as opaque :go:type:`hcl.Body` until the plugins have been loaded,
+and then each plugin provides its :go:type:`hcldec.Spec` to allow decoding the
+plugin-specific configuration into a :go:type:`cty.Value` which can then be
+transmitted to the plugin for further processing.
+
+In our example from :ref:`intro`, perhaps each of the different service types
+is managed by a plugin, and so the main program would decode the block headers
+to learn which plugins are needed, but process the block bodies dynamically:
+
+.. code-block:: go
+
+   type ServiceConfig struct {
+     Type         string   `hcl:"type,label"`
+     Name         string   `hcl:"name,label"`
+     PluginConfig hcl.Body `hcl:",remain"`
+   }
+   type Config struct {
+     IOMode   string          `hcl:"io_mode"`
+     Services []ServiceConfig `hcl:"service,block"`
+   }
+
+   var c Config
+   moreDiags := gohcl.DecodeBody(f.Body, nil, &c)
+   diags = append(diags, moreDiags...)
+   if moreDiags.HasErrors() {
+       // (show diags in the UI)
+       return
+   }
+
+   for _, sc := range c.Services {
+       pluginName := sc.Type
+
+       // Totally-hypothetical plugin manager (not part of HCL)
+       plugin, err := pluginMgr.GetPlugin(pluginName)
+       if err != nil {
+           diags = diags.Append(&hcl.Diagnostic{ /* ... */ })
+           continue
+       }
+       spec := plugin.ConfigSpec() // returns hcldec.Spec
+
+       // Decode the block body using the plugin's given specification
+       configVal, moreDiags := hcldec.Decode(sc.PluginConfig, spec, nil)
+       diags = append(diags, moreDiags...)
+       if moreDiags.HasErrors() {
+           continue
+       }
+
+       // Again, hypothetical API within your application itself, and not
+       // part of HCL. Perhaps plugin system serializes configVal as JSON
+       // and sends it over to the plugin.
+       svc := plugin.NewService(configVal)
+       serviceMgr.AddService(sc.Name, svc)
+   }
+
+
+Variables and Functions
+-----------------------
+
+The final argument to ``hcldec.Decode`` is an expression evaluation context,
+just as with ``gohcl.DecodeBody``.
+
+This object can be constructed using
+:ref:`the gohcl helper function <go-decoding-gohcl-evalcontext>` as before if desired, but
+you can also choose to work directly with :go:type:`hcl.EvalContext` as
+discussed in :ref:`go-expression-eval`:
+
+.. code-block:: go
+
+   ctx := &hcl.EvalContext{
+       Variables: map[string]cty.Value{
+           "pid": cty.NumberIntVal(int64(os.Getpid())),
+       },
+   }
+   val, moreDiags := hcldec.Decode(f.Body, spec, ctx)
+   diags = append(diags, moreDiags...)
+
+As you can see, this lower-level API also uses :go:pkg:`cty`, so it can be
+particularly convenient in situations where the result of dynamically decoding
+one block must be available to expressions in another block.
diff --git a/guide/go_decoding_lowlevel.rst b/guide/go_decoding_lowlevel.rst
new file mode 100644
index 0000000..e0b5e99
--- /dev/null
+++ b/guide/go_decoding_lowlevel.rst
@@ -0,0 +1,199 @@
+.. _go-decoding-lowlevel:
+
+Advanced Decoding With The Low-level API
+========================================
+
+In previous sections we've discussed :go:pkg:`gohcl` and :go:pkg:`hcldec`,
+which both deal with decoding of HCL bodies and the expressions within them
+using a high-level description of the expected configuration schema.
+Both of these packages are implemented in terms of HCL's low-level decoding
+interfaces, which we will explore in this section.
+
+HCL decoding in the low-level API has two distinct phases:
+
+* Structural decoding: analyzing the arguments and nested blocks present in a
+  particular body.
+
+* Expression evaluation: obtaining final values for each argument expression
+  found during structural decoding.
+
+The low-level API gives the calling application full control over when each
+body is decoded and when each expression is evaluated, allowing for more
+complex configuration formats where e.g. different variables are available in
+different contexts, or perhaps expressions within one block can refer to
+values defined in another block.
+
+The low-level API also gives more detailed access to source location
+information for decoded elements, and so may be desirable for applications that
+do a lot of additional validation of decoded data where more specific source
+locations lead to better diagnostic messages.
+
+Since all of the decoding mechanisms work with the same :go:type:`hcl.Body`
+type, it is fine and expected to mix them within an application to get access
+to the more detailed information where needed while using the higher-level APIs
+for the more straightforward portions of a configuration language.
+
+The following subsections will give an overview of the low-level API. For full
+details, see `the godoc reference <https://godoc.org/github.com/hashicorp/hcl/v2/hcl>`_.
+
+Structural Decoding
+-------------------
+
+As seen in prior sections, :go:type:`hcl.Body` is an opaque representation of
+the arguments and child blocks at a particular nesting level. An HCL file has
+a root body containing the top-level elements, and then each nested block has
+its own body presenting its own content.
+
+:go:type:`hcl.Body` is a Go interface whose methods serve as the structural
+decoding API:
+
+.. go:currentpackage:: hcl
+
+.. go:type:: Body
+
+   Represents the structural elements at a particular nesting level.
+
+   .. go:function:: func (b Body) Content(schema *BodySchema) (*BodyContent, Diagnostics)
+
+      Decode the content from the receiving body using the given schema. The
+      schema is considered exhaustive of all content within the body, and so
+      any elements not covered by the schema will generate error diagnostics.
+
+   .. go:function:: func (b Body) PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics)
+
+      Similar to ``Content``, but allows for additional arguments and block types
+      that are not described in the given schema. The additional body return
+      value is a special body that contains only the *remaining* elements, after
+      extraction of the ones covered by the schema. This returned body can be
+      used to decode the remaining content elsewhere in the calling program.
+
+   .. go:function:: func (b Body) JustAttributes() (Attributes, Diagnostics)
+
+      Decode the content from the receiving body in a special *attributes-only*
+      mode, allowing the calling application to enumerate the arguments given
+      inside the body without needing to predict them in schema.
+
+      When this method is used, a body can be treated somewhat like a map
+      expression, but it still has a rigid structure where the arguments must
+      be given directly with no expression evaluation. This is an advantage for
+      declarations that must themselves be resolved before expression
+      evaluation is possible.
+
+      If the body contains any blocks, error diagnostics are returned. JSON
+      syntax relies on schema to distinguish arguments from nested blocks, and
+      so a JSON body in attributes-only mode will treat all JSON object
+      properties as arguments.
+
+   .. go:function:: func (b Body) MissingItemRange() Range
+
+      Returns a source range that points to where an absent required item in
+      the body might be placed. This is a "best effort" sort of thing, required
+      only to be somewhere inside the receiving body, as a way to give source
+      location information for a "missing required argument" sort of error.
+
+The main content-decoding methods each require a :go:type:`hcl.BodySchema`
+object describing the expected content. The fields of this type describe the
+expected arguments and nested block types respectively:
+
+.. code-block:: go
+
+   schema := &hcl.BodySchema{
+       Attributes: []hcl.AttributeSchema{
+           {
+               Name:     "io_mode",
+               Required: false,
+           },
+       },
+       Blocks: []hcl.BlockHeaderSchema{
+           {
+               Type:       "service",
+               LabelNames: []string{"type", "name"},
+           },
+       },
+   }
+   content, moreDiags := body.Content(schema)
+   diags = append(diags, moreDiags...)
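+
+``PartialContent`` can be used in the same way when the schema is not
+exhaustive; a sketch of the difference:
+
+.. code-block:: go
+
+   // Elements not named in the schema are tolerated and returned in a
+   // separate "remaining" body for later decoding.
+   content, remain, moreDiags := body.PartialContent(schema)
+   diags = append(diags, moreDiags...)
+   // "remain" is an hcl.Body that can be decoded elsewhere, for example
+   // with a schema provided by another subsystem.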
+
+:go:type:`hcl.BodyContent` is the result of both ``Content`` and
+``PartialContent``, giving the actual attributes and nested blocks that were
+found. Since arguments are uniquely named within a body and unordered, they
+are returned as a map. Nested blocks are ordered and may have many instances
+of a given type, so they are returned all together in a single slice for
+further interpretation by the caller.
+
+Unlike the two higher-level approaches, the low-level API *always* works only
+with one nesting level at a time. Decoding a nested block returns the "header"
+for that block, giving its type and label values, but its body remains an
+:go:type:`hcl.Body` for later decoding.
+
+Each returned attribute corresponds to one of the arguments in the body, and
+it has an :go:type:`hcl.Expression` object that can be used to obtain a value
+for the argument during expression evaluation, as described in the next
+section.
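+
+A sketch of consuming the :go:type:`hcl.BodyContent` returned by the example
+above, under the same schema:
+
+.. code-block:: go
+
+   if attr, exists := content.Attributes["io_mode"]; exists {
+       // attr.Expr is an hcl.Expression, evaluated as shown in the
+       // next section.
+       _ = attr.Expr
+   }
+
+   for _, block := range content.Blocks {
+       // With the schema above, block.Type is always "service" and
+       // block.Labels holds the "type" and "name" label values in order.
+       serviceType, serviceName := block.Labels[0], block.Labels[1]
+       _, _ = serviceType, serviceName
+
+       // block.Body remains an hcl.Body for later decoding.
+   }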
+
+Expression Evaluation
+---------------------
+
+Expression evaluation *in general* has its own section, imaginatively titled
+:ref:`go-expression-eval`, so this section will focus only on how it is
+achieved in the low-level API.
+
+All expression evaluation in the low-level API starts with an
+:go:type:`hcl.Expression` object. This is another interface type, with various
+implementations depending on the expression type and the syntax it was parsed
+from.
+
+.. go:currentpackage:: hcl
+
+.. go:type:: Expression
+
+   Represents a single, unevaluated expression.
+
+   .. go:function:: func (e Expression) Value(ctx *EvalContext) (cty.Value, Diagnostics)
+
+      Evaluates the receiving expression in the given evaluation context. The
+      result is a :go:type:`cty.Value` representing the result value, along
+      with any diagnostics that were raised during evaluation.
+
+      If the diagnostics contain errors, the value may be incomplete or
+      invalid and should either be discarded altogether or used with care for
+      analysis.
+
+   .. go:function:: func (e Expression) Variables() []Traversal
+
+      Returns information about any nested expressions that access variables
+      from the *global* evaluation context. Does not include references to
+      temporary local variables, such as those generated by a
+      "``for`` expression".
+
+   .. go:function:: func (e Expression) Range() Range
+
+      Returns the source range for the entire expression. This can be useful
+      when generating application-specific diagnostic messages, such as
+      value validation errors.
+
+   .. go:function:: func (e Expression) StartRange() Range
+
+      Similar to ``Range``, but if the expression is complex, such as a tuple
+      or object constructor, may indicate only the opening tokens for the
+      construct to avoid creating an overwhelming source code snippet.
+
+      This should be used in diagnostic messages only in situations where the
+      error is clearly with the construct itself and not with the overall
+      expression. For example, a type error indicating that a tuple was not
+      expected might use ``StartRange`` to draw attention to the beginning
+      of a tuple constructor, without highlighting the entire expression.
+
+Method ``Value`` is the primary API for expressions, and takes the same kind
+of evaluation context object described in :ref:`go-expression-eval`.
+
+.. code-block:: go
+
+   ctx := &hcl.EvalContext{
+        Variables: map[string]cty.Value{
+            "name": cty.StringVal("Ermintrude"),
+            "age":  cty.NumberIntVal(32),
+        },
+   }
+   val, moreDiags := expr.Value(ctx)
+   diags = append(diags, moreDiags...)
diff --git a/guide/go_diagnostics.rst b/guide/go_diagnostics.rst
new file mode 100644
index 0000000..a948542
--- /dev/null
+++ b/guide/go_diagnostics.rst
@@ -0,0 +1,97 @@
+.. _go-diagnostics:
+
+Diagnostic Messages
+===================
+
+An important concern for any machine language intended for human authoring is
+to produce good error messages when the input is somehow invalid, or has
+other problems.
+
+HCL uses *diagnostics* to describe problems in an end-user-oriented manner,
+such that the calling application can render helpful error or warning messages.
+The word "diagnostic" is a general term that covers both errors and warnings,
+where errors are problems that prevent complete processing while warnings are
+possible concerns that do not block processing.
+
+HCL deviates from usual Go API practice by returning its own ``hcl.Diagnostics``
+type, instead of Go's own ``error`` type. This allows functions to return
+warnings without accompanying errors while not violating the usual expectation
+that the absence of errors is indicated by a nil ``error``.
+
+In order to easily accumulate and return multiple diagnostics at once, the
+usual pattern for functions returning diagnostics is to gather them in a
+local variable and then return it at the end of the function, or possibly
+earlier if the function cannot continue due to the problems.
+
+.. code-block:: go
+
+  func returningDiagnosticsExample() hcl.Diagnostics {
+      var diags hcl.Diagnostics
+
+      // ...
+
+      // Call a function that may itself produce diagnostics.
+      f, moreDiags := parser.ParseHCLFile("example.conf")
+      // always append, in case warnings are present
+      diags = append(diags, moreDiags...)
+      if diags.HasErrors() {
+          // If we can't safely continue in the presence of errors here, we
+          // can optionally return early.
+          return diags
+      }
+
+      // ...
+
+      return diags
+  }
+
+A common variant of the above pattern is calling another diagnostics-generating
+function in a loop, using ``continue`` to begin the next iteration when errors
+are detected, but still completing all iterations and returning the union of
+all of the problems encountered along the way.
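+
+A sketch of that loop variant, assuming a hypothetical ``loadServiceConfig``
+function in the calling application that itself returns diagnostics:
+
+.. code-block:: go
+
+   var diags hcl.Diagnostics
+   for _, filename := range filenames {
+       moreDiags := loadServiceConfig(filename)
+       diags = append(diags, moreDiags...)
+       if moreDiags.HasErrors() {
+           // Skip further processing of this item, but keep iterating so
+           // that all of the problems can be reported together.
+           continue
+       }
+       // ...
+   }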
+
+In :ref:`go-parsing`, we saw that the parser can generate diagnostics which
+are related to syntax problems within the loaded file. Further steps to decode
+content from the loaded file can also generate diagnostics related to *semantic*
+problems within the file, such as invalid expressions or type mismatches, and
+so a program using HCL will generally need to accumulate diagnostics across
+these various steps and then render them in the application UI somehow.
+
+Rendering Diagnostics in the UI
+-------------------------------
+
+The best way to render diagnostics to an end-user will depend a lot on the
+type of application: they might be printed into a terminal, written into a
+log for later review, or even shown in a GUI.
+
+HCL leaves the responsibility for rendering diagnostics to the calling
+application, but since rendering to a terminal is a common case for command-line
+tools, the ``hcl`` package contains a default implementation of this in the
+form of a "diagnostic text writer":
+
+.. code-block:: go
+
+   wr := hcl.NewDiagnosticTextWriter(
+       os.Stdout,      // writer to send messages to
+       parser.Files(), // the parser's file cache, for source snippets
+       78,             // wrapping width
+       true,           // generate colored/highlighted output
+   )
+   wr.WriteDiagnostics(diags)
+
+This default implementation of diagnostic rendering includes relevant lines
+of source code for context, like this:
+
+::
+
+  Error: Unsupported block type
+
+    on example.tf line 4, in resource "aws_instance" "example":
+     4: provisionr "local-exec" {
+
+  Blocks of type "provisionr" are not expected here. Did you mean "provisioner"?
+
+If the "color" flag is enabled, the severity will be additionally indicated by
+a text color and the relevant portion of the source code snippet will be
+underlined to draw further attention.
+
diff --git a/guide/go_expression_eval.rst b/guide/go_expression_eval.rst
new file mode 100644
index 0000000..df0d1d4
--- /dev/null
+++ b/guide/go_expression_eval.rst
@@ -0,0 +1,149 @@
+.. _go-expression-eval:
+
+Expression Evaluation
+=====================
+
+Each argument attribute in a configuration file is interpreted as an
+expression. In the HCL native syntax, certain basic expression functionality
+is always available, such as arithmetic and template strings, and the calling
+application can extend this by making available specific variables and/or
+functions via an *evaluation context*.
+
+We saw in :ref:`go-decoding-gohcl` and :ref:`go-decoding-hcldec` some basic
+examples of populating an evaluation context to make a variable available.
+This section will look more closely at the ``hcl.EvalContext`` type and how
+HCL expression evaluation behaves in different cases.
+
+This section does not discuss in detail the expression syntax itself. For more
+information on that, see the HCL Native Syntax specification.
+
+.. go:currentpackage:: hcl
+
+.. go:type:: EvalContext
+
+   ``hcl.EvalContext`` is the type used to describe the variables and functions
+   available during expression evaluation, if any. Its usage is described in
+   the following sections.
+
+Defining Variables
+------------------
+
+As we saw in :ref:`go-decoding-hcldec`, HCL represents values using an
+underlying library called :go:pkg:`cty`. When defining variables, their values
+must be given as :go:type:`cty.Value` values.
+
+A full description of the types and value constructors in :go:pkg:`cty` is
+in `the reference documentation <https://github.com/zclconf/go-cty/blob/master/docs/types.md>`_.
+Variables in HCL are defined by assigning values into a map from string names
+to :go:type:`cty.Value`:
+
+.. code-block:: go
+
+   ctx := &hcl.EvalContext{
+        Variables: map[string]cty.Value{
+            "name": cty.StringVal("Ermintrude"),
+            "age":  cty.NumberIntVal(32),
+        },
+   }
+
+If this evaluation context were passed to one of the evaluation functions we
+saw in previous sections, the user would be able to refer to these variable
+names in any argument expression appearing in the evaluated portion of
+configuration:
+
+.. code-block:: hcl
+
+   message = "${name} is ${age} ${age == 1 ? "year" : "years"} old!"
+
+If you place ``cty``'s *object* values in the evaluation context, then their
+attributes can be referenced using the HCL attribute syntax, allowing for more
+complex structures:
+
+.. code-block:: go
+
+   ctx := &hcl.EvalContext{
+        Variables: map[string]cty.Value{
+            "path": cty.ObjectVal(map[string]cty.Value{
+                "root":    cty.StringVal(rootDir),
+                "module":  cty.StringVal(moduleDir),
+                "current": cty.StringVal(currentDir),
+            }),
+        },
+   }
+
+.. code-block:: hcl
+
+   source_file = "${path.module}/foo.txt"
+
+.. _go-expression-funcs:
+
+Defining Functions
+------------------
+
+Custom functions can be defined by your application to allow users of its
+language to transform data in application-specific ways. The underlying
+function mechanism is also provided by :go:pkg:`cty`, allowing you to define
+the arguments a given function expects, what value type it will return for
+given argument types, etc. The full functions model is described in the
+``cty`` documentation section
+`Functions System <https://github.com/zclconf/go-cty/blob/master/docs/functions.md>`_.
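+
+As a brief sketch, a custom function is constructed with ``function.New``
+from :go:pkg:`cty`'s ``function`` package, passing a spec that describes its
+parameters, return type, and implementation. The ``double`` function here is
+purely illustrative:
+
+.. code-block:: go
+
+   // import "github.com/zclconf/go-cty/cty/function"
+
+   doubleFunc := function.New(&function.Spec{
+       Params: []function.Parameter{
+           {Name: "num", Type: cty.Number},
+       },
+       Type: function.StaticReturnType(cty.Number),
+       Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+           return args[0].Multiply(cty.NumberIntVal(2)), nil
+       },
+   })
+
+The resulting ``doubleFunc`` value can then be included in the ``Functions``
+map shown in the example below.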
+
+There are `a number of "standard library" functions <https://godoc.org/github.com/zclconf/go-cty/cty/function/stdlib>`_
+available in a ``stdlib`` package within the :go:pkg:`cty` repository, avoiding
+the need for each application to re-implement basic functions for string
+manipulation, list manipulation, etc. It also includes function-shaped versions
+of several operations that are native operators in HCL, which should generally
+*not* be exposed as functions in HCL-based configuration formats to avoid user
+confusion.
+
+You can define functions in the ``Functions`` field of :go:type:`hcl.EvalContext`:
+
+.. code-block:: go
+
+   ctx := &hcl.EvalContext{
+        Variables: map[string]cty.Value{
+            "name": cty.StringVal("Ermintrude"),
+        },
+        Functions: map[string]function.Function{
+            "upper":  stdlib.UpperFunc,
+            "lower":  stdlib.LowerFunc,
+            "min":    stdlib.MinFunc,
+            "max":    stdlib.MaxFunc,
+            "strlen": stdlib.StrlenFunc,
+            "substr": stdlib.SubstrFunc,
+        },
+   }
+
+If this evaluation context were passed to one of the evaluation functions we
+saw in previous sections, the user would be able to call any of these functions
+in any argument expression appearing in the evaluated portion of configuration:
+
+.. code-block:: hcl
+
+   message = "HELLO, ${upper(name)}!"
+
+Expression Evaluation Modes
+---------------------------
+
+HCL uses a different expression evaluation mode depending on the evaluation
+context provided. In HCL native syntax, evaluation modes are used to provide
+more relevant error messages. In JSON syntax, which embeds the native
+expression syntax in strings using "template" syntax, the evaluation mode
+determines whether strings are evaluated as templates at all.
+
+If the given :go:type:`hcl.EvalContext` is ``nil``, native syntax expressions
+will react to users attempting to refer to variables or functions by producing
+errors indicating that these features are not available at all, rather than
+by saying that the specific variable or function does not exist. JSON syntax
+strings will not be evaluated as templates *at all* in this mode, making them
+function as literal strings.
+
+If the evaluation context is non-``nil`` but either ``Variables`` or
+``Functions`` within it is ``nil``, native syntax will similarly produce
+"not supported" error messages. JSON syntax strings *will* parse templates
+in this case, but can also generate "not supported" messages if e.g. the
+user accesses a variable when the variables map is ``nil``.
+
+If neither map is ``nil``, HCL assumes that both variables and functions are
+supported and will instead produce error messages stating that the specific
+variable or function accessed by the user is not defined.
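+
+A sketch of how these modes appear to calling code, assuming ``expr`` is an
+:go:type:`hcl.Expression` that refers to a variable named ``name``:
+
+.. code-block:: go
+
+   // A nil context: evaluation reports that variables are not supported
+   // at all in this location.
+   _, diags := expr.Value(nil)
+
+   // A populated context: a reference to an undefined name would instead
+   // be reported as that specific variable not being defined.
+   val, moreDiags := expr.Value(&hcl.EvalContext{
+       Variables: map[string]cty.Value{
+           "name": cty.StringVal("Ermintrude"),
+       },
+   })
+   diags = append(diags, moreDiags...)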
diff --git a/guide/go_parsing.rst b/guide/go_parsing.rst
new file mode 100644
index 0000000..77345fc
--- /dev/null
+++ b/guide/go_parsing.rst
@@ -0,0 +1,64 @@
+.. _go-parsing:
+
+Parsing HCL Input
+=================
+
+The first step in processing HCL input provided by a user is to parse it.
+Parsing turns the raw bytes from an input file into a higher-level
+representation of the arguments and blocks, ready to be *decoded* into an
+application-specific form.
+
+The main entry point into HCL parsing is :go:pkg:`hclparse`, which provides
+:go:type:`hclparse.Parser`:
+
+.. code-block:: go
+
+  parser := hclparse.NewParser()
+  f, diags := parser.ParseHCLFile("server.conf")
+
+Variable ``f`` is then a pointer to an :go:type:`hcl.File`, which is an
+opaque abstract representation of the file, ready to be decoded.
+
+Variable ``diags`` describes any errors or warnings that were encountered
+during processing; HCL conventionally uses this in place of the usual ``error``
+return value in Go, to allow returning a mixture of multiple errors and
+warnings together with enough information to present good error messages to the
+user. We'll cover this in more detail in the next section,
+:ref:`go-diagnostics`.
+
+.. go:package:: hclparse
+
+Package ``hclparse``
+--------------------
+
+.. go:type:: Parser
+
+  .. go:function:: func NewParser() *Parser
+
+      Constructs a new parser object. Each parser contains a cache of files
+      that have already been read, so repeated calls to load the same file
+      will return the same object.
+
+  .. go:function:: func (*Parser) ParseHCL(src []byte, filename string) (*hcl.File, hcl.Diagnostics)
+
+     Parse the given source code as HCL native syntax, saving the result into
+     the parser's file cache under the given filename.
+
+  .. go:function:: func (*Parser) ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics)
+
+     Parse the contents of the given file as HCL native syntax. This is a
+     convenience wrapper around ParseHCL that first reads the file into memory.
+
+  .. go:function:: func (*Parser) ParseJSON(src []byte, filename string) (*hcl.File, hcl.Diagnostics)
+
+     Parse the given source code as JSON syntax, saving the result into
+     the parser's file cache under the given filename.
+
+  .. go:function:: func (*Parser) ParseJSONFile(filename string) (*hcl.File, hcl.Diagnostics)
+
+     Parse the contents of the given file as JSON syntax. This is a
+     convenience wrapper around ParseJSON that first reads the file into memory.
+
+The above list just highlights the main functions in this package.
+For full documentation, see
+`the hclparse godoc <https://godoc.org/github.com/hashicorp/hcl/v2/hclparse>`_.
diff --git a/guide/go_patterns.rst b/guide/go_patterns.rst
new file mode 100644
index 0000000..85a9022
--- /dev/null
+++ b/guide/go_patterns.rst
@@ -0,0 +1,315 @@
+Design Patterns for Complex Systems
+===================================
+
+In previous sections we've seen an overview of some different ways an
+application can decode a language it has defined in terms of the HCL grammar.
+For many applications, those mechanisms are sufficient. However, there are
+some more complex situations that can benefit from some additional techniques.
+This section lists a few of these situations and ways to use the HCL API to
+accommodate them.
+
+.. _go-interdep-blocks:
+
+Interdependent Blocks
+---------------------
+
+In some configuration languages, the variables available for use in one
+configuration block depend on values defined in other blocks.
+
+For example, in Terraform many of the top-level constructs are also implicitly
+definitions of values that are available for use in expressions elsewhere:
+
+.. code-block:: hcl
+
+   variable "network_numbers" {
+     type = list(number)
+   }
+
+   variable "base_network_addr" {
+     type    = string
+     default = "10.0.0.0/8"
+   }
+
+   locals {
+     network_blocks = {
+       for x in var.network_numbers:
+       x => cidrsubnet(var.base_network_addr, 8, x)
+     }
+   }
+
+   resource "cloud_subnet" "example" {
+     for_each = local.network_blocks
+
+     cidr_block = each.value
+   }
+
+   output "subnet_ids" {
+     value = cloud_subnet.example[*].id
+   }
+
+In this example, the ``variable "network_numbers"`` block makes
+``var.network_numbers`` available to expressions, the
+``resource "cloud_subnet" "example"`` block makes ``cloud_subnet.example``
+available, etc.
+
+Terraform achieves this by decoding the top-level structure in isolation to
+start. You can do this either using the low-level API or using :go:pkg:`gohcl`
+with :go:type:`hcl.Body` fields tagged as "remain".
+
+Once you have a separate body for each top-level block, you can inspect each
+of the attribute expressions inside using the ``Variables`` method on
+:go:type:`hcl.Expression`, or the ``Variables`` function from package
+:go:pkg:`hcldec` if you will eventually use its higher-level API to decode as
+Terraform does.
+
+The detected variable references can then be used to construct a dependency
+graph between the blocks, and then perform a
+`topological sort <https://en.wikipedia.org/wiki/Topological_sorting>`_ to
+determine the correct order to evaluate each block's contents so that values
+will always be available before they are needed.
+
+Since :go:pkg:`cty` values are immutable, it is not convenient to directly
+change values in a :go:type:`hcl.EvalContext` during this gradual evaluation.
+Instead, maintain a specialized data structure that holds a separate value
+per object, and construct a fresh evaluation context from it each time a new
+value becomes available.
+
+Using :go:pkg:`hcldec` to evaluate block bodies is particularly convenient in
+this scenario because it produces :go:type:`cty.Value` results which can then
+just be directly incorporated into the evaluation context.
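+
+A simplified sketch of this flow, where ``block``, ``spec``, and
+``knownLocalValues`` all stand in for application-specific values:
+
+.. code-block:: go
+
+   // Collect the variables each block refers to, to build dependency edges.
+   for _, traversal := range hcldec.Variables(block.Body, spec) {
+       rootName := traversal.RootName() // e.g. "var", "local", ...
+       // record an edge from this block to whatever defines rootName
+       _ = rootName
+   }
+
+   // Later, once the blocks this one depends on have produced values:
+   ctx := &hcl.EvalContext{
+       Variables: map[string]cty.Value{
+           "local": cty.ObjectVal(knownLocalValues),
+       },
+   }
+   blockVal, moreDiags := hcldec.Decode(block.Body, spec, ctx)
+   diags = append(diags, moreDiags...)
+   // blockVal can now be added to the structure used to build the next
+   // evaluation context.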
+
+Distributed Systems
+-------------------
+
+Distributed systems cause a number of extra challenges, and configuration
+management is rarely the worst of these. However, there are some specific
+considerations for using HCL-based configuration in distributed systems.
+
+For the sake of this section, we are concerned with distributed systems where
+at least two separate components both depend on the content of HCL-based
+configuration files. Real-world examples include the following:
+
+* **HashiCorp Nomad** loads configuration (job specifications) in its servers
+  but also needs these results in its clients and in its various driver plugins.
+
+* **HashiCorp Terraform** parses configuration in Terraform Core but can write
+  a partially-evaluated execution plan to disk and continue evaluation in a
+  separate process later. It must also pass configuration values into provider
+  plugins.
+
+Broadly speaking, there are two approaches to allowing configuration to be
+accessed in multiple subsystems, which the following subsections will discuss
+separately.
+
+Ahead-of-time Evaluation
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Ahead-of-time evaluation is the simplest path, with the configuration files
+being entirely evaluated on entry to the system, and then only the resulting
+*constant values* being passed between subsystems.
+
+This approach is relatively straightforward because the resulting
+:go:type:`cty.Value` results can be losslessly serialized as either JSON or
+msgpack as long as all system components agree on the expected value types.
+Aside from passing these values around "on the wire", parsing and decoding of
+configuration proceeds as normal.
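+
+A sketch of that wire encoding using :go:pkg:`cty`'s JSON package, assuming
+``configVal`` and its agreed-upon ``configType`` come from an earlier decode
+step:
+
+.. code-block:: go
+
+   // import ctyjson "github.com/zclconf/go-cty/cty/json"
+
+   buf, err := ctyjson.Marshal(configVal, configType)
+   // ... send buf to the other subsystem ...
+   configVal, err = ctyjson.Unmarshal(buf, configType)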
+
+Both Nomad and Terraform use this approach for interacting with *plugins*,
+because the plugins themselves are written by various different teams that do
+not coordinate closely, and so doing all expression evaluation in the core
+subsystems ensures consistency between plugins and simplifies plugin development.
+
+In both applications, the plugin is expected to describe (using an
+application-specific protocol) the schema it expects for each element of
+configuration it is responsible for, allowing the core subsystems to perform
+decoding on the plugin's behalf and pass a value that is guaranteed to conform
+to the schema.
+
+Gradual Evaluation
+^^^^^^^^^^^^^^^^^^
+
+Although ahead-of-time evaluation is relatively straightforward, it has the
+significant disadvantage that all data available for access via variables or
+functions must be known by whichever subsystem performs that initial
+evaluation.
+
+For example, in Terraform, the "plan" subcommand is responsible for evaluating
+the configuration and presenting to the user an execution plan for approval, but
+certain values in that plan cannot be determined until the plan is already
+being applied, since the specific values used depend on remote API decisions
+such as the allocation of opaque id strings for objects.
+
+In Terraform's case, both the creation of the plan and the eventual apply
+of that plan entail evaluating configuration, with the apply step
+having a more complete set of input values and thus producing a more complete
+result. However, this means that Terraform must somehow make the expressions
+from the original input configuration available to the separate process that
+applies the generated plan.
+
+Good usability requires error and warning messages that are able to refer back
+to specific sections of the input configuration as context for the reported
+problem, and the best way to achieve this in a distributed system doing
+gradual evaluation is to send the configuration *source code* between
+subsystems. This is generally the most compact representation that retains
+source location information, and will avoid any inconsistency caused by
+introducing another intermediate serialization.
+
+In Terraform's case, for example, the serialized plan incorporates both the data
+structure describing the partial evaluation results from the plan phase and
+the original configuration files that produced those results, which can then
+be re-evaluated during the apply step.
+
+In a gradual evaluation scenario, the application should verify correctness of
+the input configuration as completely as possible at each stage. To help with
+this, :go:pkg:`cty` has the concept of
+`unknown values <https://github.com/zclconf/go-cty/blob/master/docs/concepts.md#unknown-values-and-the-dynamic-pseudo-type>`_,
+which can stand in for values the application does not yet know while still
+retaining correct type information. HCL expression evaluation reacts to unknown
+values by performing type checking but then returning another unknown value,
+causing the unknowns to propagate through expressions automatically.
+
+.. code-block:: go
+
+   ctx := &hcl.EvalContext{
+        Variables: map[string]cty.Value{
+            "name": cty.UnknownVal(cty.String),
+            "age":  cty.UnknownVal(cty.Number),
+        },
+   }
+   val, moreDiags := expr.Value(ctx)
+   diags = append(diags, moreDiags...)
+
+Each time an expression is re-evaluated with additional information, fewer of
+the input values will be unknown and thus more of the result will be known.
+Eventually the application should evaluate the expressions with no unknown
+values at all, which then guarantees that the result will also be wholly-known.
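+
+A result can be tested for remaining unknowns to decide whether another
+evaluation round is still needed; for example:
+
+.. code-block:: go
+
+   if !val.IsWhollyKnown() {
+       // Defer any work that needs the final value until more of the
+       // inputs become known.
+   }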
+
+Static References, Calls, Lists, and Maps
+-----------------------------------------
+
+In most cases, we care more about the final result value of an expression than
+how that value was obtained. A particular list argument, for example, might
+be defined by the user via a tuple constructor, by a ``for`` expression, or by
+assigning the value of a variable that has a suitable list type.
+
+In some special cases, the structure of the expression is more important than
+the result value, or an expression may not *have* a reasonable result value.
+For example, in Terraform there are a few arguments that call for the user
+to name another object by reference, rather than provide an object value:
+
+.. code-block:: hcl
+
+   resource "cloud_network" "example" {
+     # ...
+   }
+
+   resource "cloud_subnet" "example" {
+     cidr_block = "10.1.2.0/24"
+
+     depends_on = [
+       cloud_network.example,
+     ]
+   }
+
+The ``depends_on`` argument in the second ``resource`` block *appears* as an
+expression that would construct a single-element tuple containing an object
+representation of the first resource block. However, Terraform uses this
+expression to construct its dependency graph, and so it needs to see
+specifically that this expression refers to ``cloud_network.example``, rather
+than determine a result value for it.
+
+HCL offers a number of "static analysis" functions to help with this sort of
+situation. These all live in the :go:pkg:`hcl` package, and each one imposes
+a particular requirement on the syntax tree of the expression it is given,
+and returns a result derived from that if the expression conforms to that
+requirement.
+
+.. go:currentpackage:: hcl
+
+.. go:function:: func ExprAsKeyword(expr Expression) string
+
+   This function attempts to interpret the given expression as a single keyword,
+   returning that keyword as a string if possible.
+
+   A "keyword" for the purposes of this function is an expression that can be
+   understood as a valid single identifier. For example, the simple variable
+   reference ``foo`` can be interpreted as a keyword, while ``foo.bar``
+   cannot.
+
+   As a special case, the language-level keywords ``true``, ``false``, and
+   ``null`` are also considered to be valid keywords, allowing the calling
+   application to disregard their usual meaning.
+
+   If the given expression cannot be reduced to a single keyword, the result
+   is an empty string. Since an empty string is never a valid keyword, this
+   result unambiguously signals failure.
+
+.. go:function:: func AbsTraversalForExpr(expr Expression) (Traversal, Diagnostics)
+
+   This is a generalization of ``ExprAsKeyword`` that will accept anything that
+   can be interpreted as a *traversal*, which is a variable name followed by
+   zero or more attribute access or index operators with constant operands.
+
+   For example, all of ``foo``, ``foo.bar`` and ``foo[0]`` are valid
+   traversals, but ``foo[bar]`` is not, because the ``bar`` index is not
+   constant.
+
+   This is the function that Terraform uses to interpret the items within the
+   ``depends_on`` sequence in our example above.
+
+   As with ``ExprAsKeyword``, this function has a special case that the
+   keywords ``true``, ``false``, and ``null`` will be accepted as if they were
+   variable names by this function, allowing ``null.foo`` to be interpreted
+   as a traversal even though it would be invalid if evaluated.
+
+   If error diagnostics are returned, the traversal result is invalid and
+   should not be used.
+
+.. go:function:: func RelTraversalForExpr(expr Expression) (Traversal, Diagnostics)
+
+   This is very similar to ``AbsTraversalForExpr``, but the result is a
+   *relative* traversal, which is one whose first name is considered to be
+   an attribute of some other (implied) object.
+
+   The processing rules are identical to ``AbsTraversalForExpr``, with the
+   only exception being that the first element of the returned traversal is
+   marked as being an attribute, rather than as a root variable.
+
+.. go:function:: func ExprList(expr Expression) ([]Expression, Diagnostics)
+
+   This function requires that the given expression be a tuple constructor,
+   and if so returns a slice of the element expressions in that constructor.
+   Applications can then perform further static analysis on these, or evaluate
+   them as normal.
+
+   If error diagnostics are returned, the result is invalid and should not be
+   used.
+
+   This is the function that Terraform uses to interpret the expression
+   assigned to ``depends_on`` in our example above, before using
+   ``AbsTraversalForExpr`` in turn on each enclosed expression.
+
+.. go:function:: func ExprMap(expr Expression) ([]KeyValuePair, Diagnostics)
+
+   This function requires that the given expression be an object constructor,
+   and if so returns a slice of the element key/value pairs in that constructor.
+   Applications can then perform further static analysis on these, or evaluate
+   them as normal.
+
+   If error diagnostics are returned, the result is invalid and should not be
+   used.
+
+.. go:function:: func ExprCall(expr Expression) (*StaticCall, Diagnostics)
+
+   This function requires that the given expression be a function call, and
+   if so returns an object describing the name of the called function and
+   expression objects representing the call arguments.
+
+   If error diagnostics are returned, the result is invalid and should not be
+   used.
+
+The ``Variables`` method on :go:type:`hcl.Expression` is also considered to be
+a "static analysis" helper, but is built in as a fundamental feature because
+analysis of referenced variables is often important for static validation and
+for implementing interdependent blocks as we saw in the section above.
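+
+As a sketch of how these functions compose, the ``depends_on`` handling
+described above might look something like this, where ``attr`` is the
+corresponding :go:type:`hcl.Attribute`:
+
+.. code-block:: go
+
+   exprs, diags := hcl.ExprList(attr.Expr)
+   for _, expr := range exprs {
+       traversal, moreDiags := hcl.AbsTraversalForExpr(expr)
+       diags = append(diags, moreDiags...)
+       if moreDiags.HasErrors() {
+           continue
+       }
+       // traversal.RootName() and the remaining steps can now feed the
+       // application's dependency graph.
+       _ = traversal
+   }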
+
diff --git a/guide/index.rst b/guide/index.rst
new file mode 100644
index 0000000..6a70f7e
--- /dev/null
+++ b/guide/index.rst
@@ -0,0 +1,35 @@
+HCL Config Language Toolkit
+===========================
+
+HCL is a toolkit for creating structured configuration languages that are both
+human- and machine-friendly, for use with command-line tools, servers, etc.
+
+HCL has both a native syntax, intended to be pleasant to read and write for
+humans, and a JSON-based variant that is easier for machines to generate and
+parse. The native syntax is inspired by libucl_, `nginx configuration`_, and
+others.
+
+It includes an expression syntax that allows basic inline computation and, with
+support from the calling application, use of variables and functions for more
+dynamic configuration languages.
+
+HCL provides a set of constructs that can be used by a calling application to
+construct a configuration language. The application defines which argument
+names and nested block types are expected, and HCL parses the configuration
+file, verifies that it conforms to the expected structure, and returns
+high-level objects that the application can use for further processing.
+
+At present, HCL is primarily intended for use in applications written in Go_,
+via its library API.
+
+.. toctree::
+   :maxdepth: 1
+   :caption: Contents:
+
+   intro
+   go
+   language_design
+
+.. _libucl: https://github.com/vstakhov/libucl
+.. _`nginx configuration`: http://nginx.org/en/docs/beginners_guide.html#conf_structure
+.. _Go: https://golang.org/
diff --git a/guide/intro.rst b/guide/intro.rst
new file mode 100644
index 0000000..d089a11
--- /dev/null
+++ b/guide/intro.rst
@@ -0,0 +1,108 @@
+.. _intro:
+
+Introduction to HCL
+===================
+
+HCL-based configuration is built from two main constructs: arguments and
+blocks. The following is an example of a configuration language for a
+hypothetical application:
+
+.. code-block:: hcl
+
+  io_mode = "async"
+
+  service "http" "web_proxy" {
+    listen_addr = "127.0.0.1:8080"
+
+    process "main" {
+      command = ["/usr/local/bin/awesome-app", "server"]
+    }
+
+    process "mgmt" {
+      command = ["/usr/local/bin/awesome-app", "mgmt"]
+    }
+  }
+
+In the above example, ``io_mode`` is a top-level argument, while ``service``
+introduces a block. Within the body of a block, further arguments and nested
+blocks are allowed. A block type may also expect a number of *labels*, which
+are the quoted names following the ``service`` keyword in the above example.
+
+The specific keywords ``io_mode``, ``service``, ``process``, etc here are
+application-defined. HCL provides the general block structure syntax, and
+can validate and decode configuration based on the application's provided
+schema.
+
+HCL is a structured configuration language rather than a data structure
+serialization language. This means that unlike languages such as JSON, YAML,
+or TOML, HCL is always decoded using an application-defined schema.
+
+However, HCL does have a JSON-based alternative syntax, which allows the same
+structure above to be generated using a standard JSON serializer when users
+wish to generate configuration programmatically rather than hand-write it:
+
+.. code-block:: json
+
+  {
+    "io_mode": "async",
+    "service": {
+      "http": {
+        "web_proxy": {
+          "listen_addr": "127.0.0.1:8080",
+          "process": {
+            "main": {
+              "command": ["/usr/local/bin/awesome-app", "server"]
+            },
+            "mgmt": {
+              "command": ["/usr/local/bin/awesome-app", "mgmt"]
+            }
+          }
+        }
+      }
+    }
+  }
+
+The calling application can choose which syntaxes to support. JSON syntax may
+not be important or desirable for certain applications, but it is available for
+applications that need it. The schema provided by the calling application
+allows JSON input to be properly decoded even though JSON syntax is ambiguous
+in various ways, such as whether a JSON object is representing a nested block
+or an object expression.
+
+The collection of arguments and blocks at a particular nesting level is called
+a *body*. A file always has a root body containing the top-level elements,
+and each block also has its own body representing the elements within it.
+
+The term "attribute" can also be used to refer to what we've called an
+"argument" so far. The term "attribute" is also used for the fields of an
+object value in argument expressions, and so "argument" is used to refer
+specifically to the type of attribute that appears directly within a body.
+
+The above examples show the general "texture" of HCL-based configuration. The
+full details of the syntax are covered in the language specifications.
+
+.. todo:: Once the language specification documents have settled into a
+   final location, link them from above.
+
+Argument Expressions
+--------------------
+
+The value of an argument can be a literal value, as shown above, or it may be an
+expression to allow arithmetic, deriving one value from another, etc.
+
+.. code-block:: hcl
+
+  listen_addr = env.LISTEN_ADDR
+
+Built-in arithmetic and comparison operators are automatically available in all
+HCL-based configuration languages. A calling application may optionally
+provide variables that users can reference, like ``env`` in the above example,
+and custom functions to transform values in application-specific ways.
+
+Full details of the expression syntax are in the HCL native syntax
+specification. Since JSON does not have an expression syntax, JSON-based
+configuration files use the native syntax expression language embedded inside
+JSON strings.
+
+.. todo:: Once the language specification documents have settled into a
+   final location, link to the native syntax specification from above.
diff --git a/guide/language_design.rst b/guide/language_design.rst
new file mode 100644
index 0000000..d83202a
--- /dev/null
+++ b/guide/language_design.rst
@@ -0,0 +1,318 @@
+Configuration Language Design
+=============================
+
+In this section we will cover some conventions for HCL-based configuration
+languages that can help make them feel consistent with other HCL-based
+languages, and make the best use of HCL's building blocks.
+
+HCL's native and JSON syntaxes both define a mapping from input bytes to a
+higher-level information model. In designing a configuration language based on
+HCL, your building blocks are the components in that information model:
+blocks, arguments, and expressions.
+
+Each calling application of HCL, then, effectively defines its own language.
+Just as Atom and RSS are higher-level languages built on XML, HashiCorp
+Terraform has a higher-level language built on HCL, while HashiCorp Nomad has
+its own distinct language that is *also* built on HCL.
+
+From an end-user perspective, these are distinct languages but have a common
+underlying texture. Users of both are therefore likely to bring some
+expectations from one to the other, and so this section is an attempt to
+codify some of these shared expectations to reduce user surprise.
+
+These are subjective guidelines however, and so applications may choose to
+ignore them entirely or ignore them in certain specialized cases. An
+application providing a configuration language for a pre-existing system, for
+example, may choose to eschew the identifier naming conventions in this section
+in order to exactly match the existing names in that underlying system.
+
+Language Keywords and Identifiers
+---------------------------------
+
+Much of the work in defining an HCL-based language is in selecting good names
+for arguments, block types, variables, and functions.
+
+The standard for naming in HCL is to use all-lowercase identifiers with
+underscores separating words, like ``service`` or ``io_mode``. HCL identifiers
+do allow uppercase letters and dashes, but this is primarily for natural
+interfacing with external systems that may have other identifier conventions,
+and so these should generally be avoided for the identifiers native to your
+own language.
+
+The distinction between "keywords" and other identifiers is really just a
+convention. In your own language documentation, you may use the word "keyword"
+to refer to names that are presented as an intrinsic part of your language,
+such as important top-level block type names.
+
+Block type names are usually singular, since each block defines a single
+object. Use a plural block name only if the block is serving only as a
+namespacing container for a number of other objects. A block with a plural
+type name will generally contain only nested blocks, and no arguments of its
+own.
+
+Argument names are also singular unless they expect a collection value, in
+which case they should be plural. For example, ``name = "foo"`` but
+``subnet_ids = ["abc", "123"]``.
+
+Function names will generally *not* use underscores and will instead just run
+words together, as is common in the C standard library. This is a result of
+the fact that several of the standard library functions offered in ``cty``
+(covered in a later section) have names that follow C library function names
+like ``substr``. This is not a strong rule, and applications that use longer
+names may choose to use underscores for them to improve readability.
+
+Blocks vs. Object Values
+------------------------
+
+HCL blocks and argument values of object type have quite a similar appearance
+in the native syntax, and are identical in JSON syntax:
+
+.. code-block:: hcl
+
+   block {
+     foo = bar
+   }
+
+   # argument with object constructor expression
+   argument = {
+     foo = bar
+   }
+
+In spite of this superficial similarity, there are some important differences
+between these two forms.
+
+The most significant difference is that a child block can contain nested blocks
+of its own, while an object constructor expression can define only attributes
+of the object it is creating.
+
+The user-facing model for blocks is that they generally form the more "rigid"
+structure of the language itself, while argument values can be more free-form.
+An application will generally define in its schema and documentation all of
+the arguments that are valid for a particular block type, while arguments
+accepting object constructors are more appropriate for situations where the
+arguments themselves are freely selected by the user, such as when the
+expression will be converted by the application to a map type.
+
+As a less contrived example, consider the ``resource`` block type in Terraform
+and its use with a particular resource type ``aws_instance``:
+
+.. code-block:: hcl
+
+   resource "aws_instance" "example" {
+     ami           = "ami-abc123"
+     instance_type = "t2.micro"
+
+     tags = {
+       Name = "example instance"
+     }
+
+     ebs_block_device {
+       device_name = "hda1"
+       volume_size = 8
+       volume_type = "standard"
+     }
+   }
+
+The top-level block type ``resource`` is fundamental to Terraform itself and
+so an obvious candidate for block syntax: it maps directly onto an object in
+Terraform's own domain model.
+
+Within this block we see a mixture of arguments and nested blocks, all defined
+as part of the schema of the ``aws_instance`` resource type. The ``tags``
+map here is specified as an argument because its keys are free-form, chosen
+by the user and mapped directly onto a map in the underlying system.
+``ebs_block_device`` is specified as a nested block, because it is a separate
+domain object within the remote system and has a rigid schema of its own.
+
+As a special case, block syntax may sometimes be used with free-form keys if
+those keys each serve as a separate declaration of some first-class object
+in the language. For example, Terraform has a top-level block type ``locals``
+which behaves in this way:
+
+.. code-block:: hcl
+
+   locals {
+     instance_type = "t2.micro"
+     instance_id   = aws_instance.example.id
+   }
+
+Although the argument names in this block are arbitrarily selected by the
+user, each one defines a distinct top-level object. In other words, this
+approach is used to create a more ergonomic syntax for defining these simple
+single-expression objects, as a pragmatic alternative to more verbose and
+redundant declarations using blocks:
+
+.. code-block:: hcl
+
+   local "instance_type" {
+     value = "t2.micro"
+   }
+   local "instance_id" {
+     value = aws_instance.example.id
+   }
+
+The distinction between domain objects, language constructs and user data will
+always be subjective, so the final decision is up to you as the language
+designer.
+
+Standard Functions
+------------------
+
+HCL itself does not define a common set of functions available in all HCL-based
+languages; the built-in language operators give a baseline of functionality
+that is always available, but applications are free to define functions as they
+see fit.
+
+With that said, there's a number of generally-useful functions that don't
+belong to the domain of any one application: string manipulation, sequence
+manipulation, date formatting, JSON serialization and parsing, etc.
+
+Given the general need such functions serve, it's helpful if a similar set of
+functions is available with compatible behavior across multiple HCL-based
+languages, assuming the language is for an application where function calls
+make sense at all.
+
+The Go implementation of HCL is built on an underlying type and function system
+:go:pkg:`cty`, whose usage was introduced in :ref:`go-expression-funcs`. That
+library also has a package of "standard library" functions which we encourage
+applications to offer with consistent names and compatible behavior, either by
+using the standard implementations directly or offering compatible
+implementations under the same name.
+
+The "standard" functions that new configuration formats should consider
+offering are:
+
+* ``abs(number)`` - returns the absolute (positive) value of the given number.
+* ``coalesce(vals...)`` - returns the value of the first argument that isn't null. Useful only in formats where null values may appear.
+* ``compact(vals...)`` - returns a new tuple with the non-null values given as arguments, preserving order.
+* ``concat(seqs...)`` - builds a tuple value by concatenating together all of the given sequence (list or tuple) arguments.
+* ``format(fmt, args...)`` - performs simple string formatting similar to the C library function ``printf``.
+* ``hasindex(coll, idx)`` - returns true if the given collection has the given index. ``coll`` may be of list, tuple, map, or object type.
+* ``int(number)`` - returns the integer component of the given number, rounding towards zero.
+* ``jsondecode(str)`` - interprets the given string as JSON format and return the corresponding decoded value.
+* ``jsonencode(val)`` - encodes the given value as a JSON string.
+* ``length(coll)`` - returns the length of the given collection.
+* ``lower(str)`` - converts the letters in the given string to lowercase, using Unicode case folding rules.
+* ``max(numbers...)`` - returns the highest of the given number values.
+* ``min(numbers...)`` - returns the lowest of the given number values.
+* ``sethas(set, val)`` - returns true only if the given set has the given value as an element.
+* ``setintersection(sets...)`` - returns the intersection of the given sets
+* ``setsubtract(set1, set2)`` - returns a set with the elements from ``set1`` that are not also in ``set2``.
+* ``setsymdiff(sets...)`` - returns the symmetric difference of the given sets.
+* ``setunion(sets...)`` - returns the union of the given sets.
+* ``strlen(str)`` - returns the length of the given string in Unicode grapheme clusters.
+* ``substr(str, offset, length)`` - returns a substring from the given string by splitting it between Unicode grapheme clusters.
+* ``timeadd(time, duration)`` - takes a timestamp in RFC3339 format and a possibly-negative duration given as a string like ``"1h"`` (for "one hour") and returns a new RFC3339 timestamp after adding the duration to the given timestamp.
+* ``upper(str)`` - converts the letters in the given string to uppercase, using Unicode case folding rules.
+
+Not all of these functions will make sense in all applications. For example, an
+application that doesn't use set types at all would have no reason to provide
+the set-manipulation functions here.
+
+Some languages will not provide functions at all, since they are intended
+primarily for assigning values to arguments and thus neither need nor want
+custom computation of those values.
+
+Block Results as Expression Variables
+-------------------------------------
+
+In some applications, top-level blocks serve also as declarations of variables
+(or of attributes of object variables) available during expression evaluation,
+as discussed in :ref:`go-interdep-blocks`.
+
+In this case, it's most intuitive for the variables map in the evaluation
+context to contain a value named after each valid top-level block type and
+for these values to be object-typed or map-typed, reflecting the structure
+implied by block type labels.
+
+For example, an application may have a top-level ``service`` block type
+used like this:
+
+.. code-block:: hcl
+
+  service "http" "web_proxy" {
+    listen_addr = "127.0.0.1:8080"
+
+    process "main" {
+      command = ["/usr/local/bin/awesome-app", "server"]
+    }
+
+    process "mgmt" {
+      command = ["/usr/local/bin/awesome-app", "mgmt"]
+    }
+  }
+
+If the result of decoding this block were available for use in expressions
+elsewhere in configuration, the above convention would call for it to be
+available to expressions as an object at ``service.http.web_proxy``.
+
+If it is the contents of the block itself that are offered to evaluation -- or
+a superset object *derived* from the block contents -- then the block arguments
+can map directly to object attributes, but it is up to the application to
+decide which value type is most appropriate for each block type, since this
+depends on how multiple blocks of the same type relate to one another, or if
+multiple blocks of that type are even allowed.
+
+In the above example, an application would probably expose the ``listen_addr``
+argument value as ``service.http.web_proxy.listen_addr``, and may choose to
+expose the ``process`` blocks as a map of objects using the labels as keys,
+which would allow an expression like
+``service.http.web_proxy.process["main"].command``.
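+
+As a sketch of that convention (the function name here is illustrative; a
+real application would build these values from its decoded configuration
+rather than writing them literally), the resulting evaluation context might
+be constructed like this:
+
+.. code-block:: go
+
+  import (
+      "github.com/hashicorp/hcl/v2"
+      "github.com/zclconf/go-cty/cty"
+  )
+
+  // serviceEvalContext exposes the example "service" block for use in
+  // later expression evaluation.
+  func serviceEvalContext() *hcl.EvalContext {
+      webProxy := cty.ObjectVal(map[string]cty.Value{
+          "listen_addr": cty.StringVal("127.0.0.1:8080"),
+          "process": cty.MapVal(map[string]cty.Value{
+              "main": cty.ObjectVal(map[string]cty.Value{
+                  "command": cty.ListVal([]cty.Value{
+                      cty.StringVal("/usr/local/bin/awesome-app"),
+                      cty.StringVal("server"),
+                  }),
+              }),
+              "mgmt": cty.ObjectVal(map[string]cty.Value{
+                  "command": cty.ListVal([]cty.Value{
+                      cty.StringVal("/usr/local/bin/awesome-app"),
+                      cty.StringVal("mgmt"),
+                  }),
+              }),
+          }),
+      })
+      return &hcl.EvalContext{
+          Variables: map[string]cty.Value{
+              "service": cty.ObjectVal(map[string]cty.Value{
+                  "http": cty.ObjectVal(map[string]cty.Value{
+                      "web_proxy": webProxy,
+                  }),
+              }),
+          },
+      }
+  }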
+
+If multiple blocks of a given type do not have a significant order relative to
+one another, as seems to be the case with these ``process`` blocks,
+representation as a map is often the most intuitive. If the ordering of the
+blocks *is* significant then a list may be more appropriate, allowing the use
+of HCL's "splat operators" for convenient access to child arguments. However,
+there is no one-size-fits-all solution here and language designers must
+instead consider the likely usage patterns of each value and select the
+value representation that best accommodates those patterns.
+
+Some applications may choose to offer variables with slightly different names
+than the top-level blocks in order to allow for more concise references, such
+as abbreviating ``service`` to ``svc`` in the above examples. This should be
+done with care since it may make the relationship between the two less obvious,
+but this may be a good tradeoff for names that are accessed frequently that
+might otherwise hurt the readability of expressions they are embedded in.
+Familiarity permits brevity.
+
+Many applications will not make block results available for use in other
+expressions at all, in which case they are free to select whichever variable
+names make sense for what is being exposed. For example, a format may make
+environment variable values available for use in expressions, and may do so
+either as top-level variables (if no other variables are needed) or as an
+object named ``env``, which can be used as in ``env.HOME``.
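+
+A minimal sketch of that ``env`` convention (the helper name here is
+illustrative and not part of HCL itself):
+
+.. code-block:: go
+
+  import (
+      "os"
+      "strings"
+
+      "github.com/hashicorp/hcl/v2"
+      "github.com/zclconf/go-cty/cty"
+  )
+
+  // envEvalContext exposes the process environment as an object named
+  // "env", so that expressions can refer to values like env.HOME.
+  func envEvalContext() *hcl.EvalContext {
+      attrs := map[string]cty.Value{}
+      for _, entry := range os.Environ() {
+          if eq := strings.IndexByte(entry, '='); eq > 0 {
+              attrs[entry[:eq]] = cty.StringVal(entry[eq+1:])
+          }
+      }
+      return &hcl.EvalContext{
+          Variables: map[string]cty.Value{
+              "env": cty.ObjectVal(attrs),
+          },
+      }
+  }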
+
+Text Editor and IDE Integrations
+--------------------------------
+
+Since HCL defines only low-level syntax, a text editor or IDE integration for
+HCL itself can only really provide basic syntax highlighting.
+
+For non-trivial HCL-based languages, a more specialized editor integration may
+be warranted. For example, users writing configuration for HashiCorp Terraform
+must recall the argument names for numerous different provider plugins, so
+auto-completion and documentation hovertips can be a great help. Terraform
+configurations are also commonly spread over multiple files, making "Go to
+Definition" functionality useful. None of this functionality can be implemented
+generically for all HCL-based languages, since it relies on knowledge of the
+structure of Terraform's own language.
+
+Writing such text editor integrations is out of the scope of this guide. The
+Go implementation of HCL does have some building blocks to help with this, but
+it will always be an application-specific effort.
+
+However, in order to *enable* such integrations, it is best to establish a
+conventional file extension *other than* ``.hcl`` for each non-trivial
+HCL-based language, thus allowing text editors to recognize it and enable the
+appropriate integration. For example, Terraform requires ``.tf`` and
+``.tf.json`` filenames for its main configuration, and the ``hcldec`` utility
+in the HCL repository accepts spec files that should conventionally be named
+with an ``.hcldec`` extension.
+
+For simple languages that are unlikely to benefit from specific editor
+integrations, using the ``.hcl`` extension is fine and may cause an editor to
+enable basic syntax highlighting, absent any other deeper features. An editor
+extension for a specific HCL-based language should *not* generically match the
+``.hcl`` extension, since this can cause confusing results for users
+attempting to write configuration files targeting other applications.
diff --git a/guide/make.bat b/guide/make.bat
new file mode 100644
index 0000000..08ad4e0
--- /dev/null
+++ b/guide/make.bat
@@ -0,0 +1,36 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+set SPHINXPROJ=HCL
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+
+:end
+popd
diff --git a/guide/requirements.txt b/guide/requirements.txt
new file mode 100644
index 0000000..421475a
--- /dev/null
+++ b/guide/requirements.txt
@@ -0,0 +1,3 @@
+sphinx
+sphinxcontrib-golangdomain
+sphinx-autoapi
diff --git a/hcldec/block_labels.go b/hcldec/block_labels.go
new file mode 100644
index 0000000..71de451
--- /dev/null
+++ b/hcldec/block_labels.go
@@ -0,0 +1,21 @@
+package hcldec
+
+import (
+	"github.com/hashicorp/hcl/v2"
+)
+
+type blockLabel struct {
+	Value string
+	Range hcl.Range
+}
+
+func labelsForBlock(block *hcl.Block) []blockLabel {
+	ret := make([]blockLabel, len(block.Labels))
+	for i := range block.Labels {
+		ret[i] = blockLabel{
+			Value: block.Labels[i],
+			Range: block.LabelRanges[i],
+		}
+	}
+	return ret
+}
diff --git a/hcldec/decode.go b/hcldec/decode.go
new file mode 100644
index 0000000..c6e4223
--- /dev/null
+++ b/hcldec/decode.go
@@ -0,0 +1,36 @@
+package hcldec
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+func decode(body hcl.Body, blockLabels []blockLabel, ctx *hcl.EvalContext, spec Spec, partial bool) (cty.Value, hcl.Body, hcl.Diagnostics) {
+	schema := ImpliedSchema(spec)
+
+	var content *hcl.BodyContent
+	var diags hcl.Diagnostics
+	var leftovers hcl.Body
+
+	if partial {
+		content, leftovers, diags = body.PartialContent(schema)
+	} else {
+		content, diags = body.Content(schema)
+	}
+
+	val, valDiags := spec.decode(content, blockLabels, ctx)
+	diags = append(diags, valDiags...)
+
+	return val, leftovers, diags
+}
+
+func impliedType(spec Spec) cty.Type {
+	return spec.impliedType()
+}
+
+func sourceRange(body hcl.Body, blockLabels []blockLabel, spec Spec) hcl.Range {
+	schema := ImpliedSchema(spec)
+	content, _, _ := body.PartialContent(schema)
+
+	return spec.sourceRange(content, blockLabels)
+}
diff --git a/hcldec/doc.go b/hcldec/doc.go
new file mode 100644
index 0000000..23bfe54
--- /dev/null
+++ b/hcldec/doc.go
@@ -0,0 +1,12 @@
+// Package hcldec provides a higher-level API for unpacking the content of
+// HCL bodies, implemented in terms of the low-level "Content" API exposed
+// by the bodies themselves.
+//
+// It allows decoding an entire nested configuration in a single operation
+// by providing a description of the intended structure.
+//
+// For some applications it may be more convenient to use the "gohcl"
+// package, which has a similar purpose but decodes directly into native
+// Go data types. hcldec instead targets the cty type system, and thus allows
+// a cty-driven application to remain within that type system.
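+//
+// A minimal usage sketch (the spec and attribute name here are purely
+// illustrative, not part of this package's API):
+//
+//	spec := &AttrSpec{Name: "greeting", Type: cty.String, Required: true}
+//	val, diags := Decode(file.Body, spec, nil)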
+package hcldec
diff --git a/hcldec/gob.go b/hcldec/gob.go
new file mode 100644
index 0000000..e2027cf
--- /dev/null
+++ b/hcldec/gob.go
@@ -0,0 +1,23 @@
+package hcldec
+
+import (
+	"encoding/gob"
+)
+
+func init() {
+	// Every Spec implementation should be registered with gob, so that
+	// specs can be sent over gob channels, such as using
+	// github.com/hashicorp/go-plugin with plugins that need to describe
+	// what shape of configuration they are expecting.
+	gob.Register(ObjectSpec(nil))
+	gob.Register(TupleSpec(nil))
+	gob.Register((*AttrSpec)(nil))
+	gob.Register((*LiteralSpec)(nil))
+	gob.Register((*ExprSpec)(nil))
+	gob.Register((*BlockSpec)(nil))
+	gob.Register((*BlockListSpec)(nil))
+	gob.Register((*BlockSetSpec)(nil))
+	gob.Register((*BlockMapSpec)(nil))
+	gob.Register((*BlockLabelSpec)(nil))
+	gob.Register((*DefaultSpec)(nil))
+}
diff --git a/hcldec/public.go b/hcldec/public.go
new file mode 100644
index 0000000..1fa548d
--- /dev/null
+++ b/hcldec/public.go
@@ -0,0 +1,81 @@
+package hcldec
+
+import (
+	"github.com/hashicorp/hcl/v2"
+	"github.com/zclconf/go-cty/cty"
+)
+
+// Decode interprets the given body using the given specification and returns
+// the resulting value. If the given body is not valid per the spec, error
+// diagnostics are returned and the returned value is likely to be incomplete.
+//
+// The ctx argument may be nil, in which case any references to variables or
+// functions will produce error diagnostics.
+func Decode(body hcl.Body, spec Spec, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+	val, _, diags := decode(body, nil, ctx, spec, false)
+	return val, diags
+}
+
+// PartialDecode is like Decode except that it permits "leftover" items in
+// the top-level body, which are returned as a new body to allow for
+// further processing.
+//
+// Any descendent block bodies are _not_ decoded partially and thus must
+// be fully described by the given specification.
+func PartialDecode(body hcl.Body, spec Spec, ctx *hcl.EvalContext) (cty.Value, hcl.Body, hcl.Diagnostics) {
+	return decode(body, nil, ctx, spec, true)
+}
+
+// ImpliedType returns the value type that should result from decoding the
+// given spec.
+func ImpliedType(spec Spec) cty.Type {
+	return impliedType(spec)
+}
+
+// SourceRange interprets the given body using the given specification and
+// then returns the source range of the value that would be used to
+// fulfill the spec.
+//
+// This can be used if application-level validation detects value errors, to
+// obtain a reasonable SourceRange to use for generated diagnostics. It works
+// best when applied to specific body items (e.g. using AttrSpec, BlockSpec, ...)
+// as opposed to entire bodies using ObjectSpec, TupleSpec. The result will
+// be less useful the broader the specification, so e.g. a spec that returns
+// the entirety of all of the blocks of a given type is likely to be
+// _particularly_ arbitrary and useless.
+//
+// If the given body is not valid per the given spec, the result is best-effort
+// and may not actually be something ideal. It's expected that an application
+// will already have used Decode or PartialDecode earlier and thus had an
+// opportunity to detect and report spec violations.
+func SourceRange(body hcl.Body, spec Spec) hcl.Range {
+	return sourceRange(body, nil, spec)
+}
+
+// ChildBlockTypes returns a map of all of the child block types declared
+// by the given spec, with block type names as keys and the associated
+// nested body specs as values.
+func ChildBlockTypes(spec Spec) map[string]Spec {
+	ret := map[string]Spec{}
+
+	// visitSameBodyChildren walks through the spec structure, calling
+	// the given callback for each descendent spec encountered. We are
+	// interested in the specs that reference attributes and blocks.
+	var visit visitFunc
+	visit = func(s Spec) {
+		if bs, ok := s.(blockSpec); ok {
+			for _, blockS := range bs.blockHeaderSchemata() {
+				nested := bs.nestedSpec()
+				if nested != nil { // nil can be returned to dynamically opt out of this interface
+					ret[blockS.Type] = nested
+				}
+			}
+		}
+
+		s.visitSameBodyChildren(visit)
+	}
+
+	visit(spec)
+
+	return ret
+}
diff --git a/hcldec/public_test.go b/hcldec/public_test.go
new file mode 100644
index 0000000..20521fb
--- /dev/null
+++ b/hcldec/public_test.go
@@ -0,0 +1,1122 @@
+package hcldec
+
+import (
+	"fmt"
+	"reflect"
+	"testing"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hclsyntax"
+	"github.com/zclconf/go-cty/cty"
+)
+
+func TestDecode(t *testing.T) {
+	tests := []struct {
+		config    string
+		spec      Spec
+		ctx       *hcl.EvalContext
+		want      cty.Value
+		diagCount int
+	}{
+		{
+			``,
+			&ObjectSpec{},
+			nil,
+			cty.EmptyObjectVal,
+			0,
+		},
+		{
+			"a = 1\n",
+			&ObjectSpec{},
+			nil,
+			cty.EmptyObjectVal,
+			1, // attribute named "a" is not expected here
+		},
+		{
+			"a = 1\n",
+			&ObjectSpec{
+				"a": &AttrSpec{
+					Name: "a",
+					Type: cty.Number,
+				},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{
+				"a": cty.NumberIntVal(1),
+			}),
+			0,
+		},
+		{
+			"a = 1\n",
+			&AttrSpec{
+				Name: "a",
+				Type: cty.Number,
+			},
+			nil,
+			cty.NumberIntVal(1),
+			0,
+		},
+		{
+			"a = 1\n",
+			&DefaultSpec{
+				Primary: &AttrSpec{
+					Name: "a",
+					Type: cty.Number,
+				},
+				Default: &LiteralSpec{
+					Value: cty.NumberIntVal(10),
+				},
+			},
+			nil,
+			cty.NumberIntVal(1),
+			0,
+		},
+		{
+			"",
+			&DefaultSpec{
+				Primary: &AttrSpec{
+					Name: "a",
+					Type: cty.Number,
+				},
+				Default: &LiteralSpec{
+					Value: cty.NumberIntVal(10),
+				},
+			},
+			nil,
+			cty.NumberIntVal(10),
+			0,
+		},
+		{
+			"a = 1\n",
+			ObjectSpec{
+				"foo": &DefaultSpec{
+					Primary: &AttrSpec{
+						Name: "a",
+						Type: cty.Number,
+					},
+					Default: &LiteralSpec{
+						Value: cty.NumberIntVal(10),
+					},
+				},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{"foo": cty.NumberIntVal(1)}),
+			0,
+		},
+		{
+			"a = \"1\"\n",
+			&AttrSpec{
+				Name: "a",
+				Type: cty.Number,
+			},
+			nil,
+			cty.NumberIntVal(1),
+			0,
+		},
+		{
+			"a = true\n",
+			&AttrSpec{
+				Name: "a",
+				Type: cty.Number,
+			},
+			nil,
+			cty.UnknownVal(cty.Number),
+			1, // incorrect type - number required.
+		},
+		{
+			``,
+			&AttrSpec{
+				Name:     "a",
+				Type:     cty.Number,
+				Required: true,
+			},
+			nil,
+			cty.NullVal(cty.Number),
+			1, // attribute "a" is required
+		},
+
+		{
+			`
+b {
+}
+`,
+			&BlockSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+			},
+			nil,
+			cty.EmptyObjectVal,
+			0,
+		},
+		{
+			`
+b "baz" {
+}
+`,
+			&BlockSpec{
+				TypeName: "b",
+				Nested: &BlockLabelSpec{
+					Index: 0,
+					Name:  "name",
+				},
+			},
+			nil,
+			cty.StringVal("baz"),
+			0,
+		},
+		{
+			`
+b "baz" {}
+b "foo" {}
+`,
+			&BlockSpec{
+				TypeName: "b",
+				Nested: &BlockLabelSpec{
+					Index: 0,
+					Name:  "name",
+				},
+			},
+			nil,
+			cty.StringVal("baz"),
+			1, // duplicate "b" block
+		},
+		{
+			`
+b {
+}
+`,
+			&BlockSpec{
+				TypeName: "b",
+				Nested: &BlockLabelSpec{
+					Index: 0,
+					Name:  "name",
+				},
+			},
+			nil,
+			cty.NullVal(cty.String),
+			1, // missing name label
+		},
+		{
+			``,
+			&BlockSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+			},
+			nil,
+			cty.NullVal(cty.EmptyObject),
+			0,
+		},
+		{
+			"a {}\n",
+			&BlockSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+			},
+			nil,
+			cty.NullVal(cty.EmptyObject),
+			1, // blocks of type "a" are not supported
+		},
+		{
+			``,
+			&BlockSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+				Required: true,
+			},
+			nil,
+			cty.NullVal(cty.EmptyObject),
+			1, // a block of type "b" is required
+		},
+		{
+			`
+b {}
+b {}
+`,
+			&BlockSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+				Required: true,
+			},
+			nil,
+			cty.EmptyObjectVal,
+			1, // only one "b" block is allowed
+		},
+		{
+			`
+b {
+}
+`,
+			&BlockAttrsSpec{
+				TypeName:    "b",
+				ElementType: cty.String,
+			},
+			nil,
+			cty.MapValEmpty(cty.String),
+			0,
+		},
+		{
+			`
+b {
+  hello = "world"
+}
+`,
+			&BlockAttrsSpec{
+				TypeName:    "b",
+				ElementType: cty.String,
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{
+				"hello": cty.StringVal("world"),
+			}),
+			0,
+		},
+		{
+			`
+b {
+  hello = true
+}
+`,
+			&BlockAttrsSpec{
+				TypeName:    "b",
+				ElementType: cty.String,
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{
+				"hello": cty.StringVal("true"),
+			}),
+			0,
+		},
+		{
+			`
+b {
+  hello   = true
+  goodbye = 5
+}
+`,
+			&BlockAttrsSpec{
+				TypeName:    "b",
+				ElementType: cty.String,
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{
+				"hello":   cty.StringVal("true"),
+				"goodbye": cty.StringVal("5"),
+			}),
+			0,
+		},
+		{
+			``,
+			&BlockAttrsSpec{
+				TypeName:    "b",
+				ElementType: cty.String,
+			},
+			nil,
+			cty.NullVal(cty.Map(cty.String)),
+			0,
+		},
+		{
+			``,
+			&BlockAttrsSpec{
+				TypeName:    "b",
+				ElementType: cty.String,
+				Required:    true,
+			},
+			nil,
+			cty.NullVal(cty.Map(cty.String)),
+			1, // missing b block
+		},
+		{
+			`
+b {
+}
+b {
+}
+			`,
+			&BlockAttrsSpec{
+				TypeName:    "b",
+				ElementType: cty.String,
+			},
+			nil,
+			cty.MapValEmpty(cty.String),
+			1, // duplicate b block
+		},
+		{
+			`
+b {
+}
+b {
+}
+			`,
+			&BlockAttrsSpec{
+				TypeName:    "b",
+				ElementType: cty.String,
+				Required:    true,
+			},
+			nil,
+			cty.MapValEmpty(cty.String),
+			1, // duplicate b block
+		},
+		{
+			`
+b {}
+b {}
+`,
+			&BlockListSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+			},
+			nil,
+			cty.ListVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}),
+			0,
+		},
+		{
+			``,
+			&BlockListSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+			},
+			nil,
+			cty.ListValEmpty(cty.EmptyObject),
+			0,
+		},
+		{
+			`
+b "foo" {}
+b "bar" {}
+`,
+			&BlockListSpec{
+				TypeName: "b",
+				Nested: &BlockLabelSpec{
+					Name:  "name",
+					Index: 0,
+				},
+			},
+			nil,
+			cty.ListVal([]cty.Value{cty.StringVal("foo"), cty.StringVal("bar")}),
+			0,
+		},
+		{
+			`
+b {}
+b {}
+b {}
+`,
+			&BlockListSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+				MaxItems: 2,
+			},
+			nil,
+			cty.ListVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal, cty.EmptyObjectVal}),
+			1, // too many b blocks
+		},
+		{
+			`
+b {}
+b {}
+`,
+			&BlockListSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+				MinItems: 10,
+			},
+			nil,
+			cty.ListVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}),
+			1, // insufficient b blocks
+		},
+		{
+			`
+b {
+	a = true
+}
+b {
+	a = 1
+}
+`,
+			&BlockListSpec{
+				TypeName: "b",
+				Nested: &AttrSpec{
+					Name: "a",
+					Type: cty.DynamicPseudoType,
+				},
+			},
+			nil,
+			cty.DynamicVal,
+			1, // Inconsistent argument types in b blocks
+		},
+		{
+			`
+b {
+	a = true
+}
+b {
+	a = "not a bool"
+}
+`,
+			&BlockListSpec{
+				TypeName: "b",
+				Nested: &AttrSpec{
+					Name: "a",
+					Type: cty.DynamicPseudoType,
+				},
+			},
+			nil,
+			cty.ListVal([]cty.Value{
+				cty.StringVal("true"), // type unification generalizes all the values to strings
+				cty.StringVal("not a bool"),
+			}),
+			0,
+		},
+		{
+			`
+b {}
+b {}
+`,
+			&BlockSetSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+				MaxItems: 2,
+			},
+			nil,
+			cty.SetVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}),
+			0,
+		},
+		{
+			`
+b "foo" "bar" {}
+b "bar" "baz" {}
+`,
+			&BlockSetSpec{
+				TypeName: "b",
+				Nested: TupleSpec{
+					&BlockLabelSpec{
+						Name:  "name",
+						Index: 1,
+					},
+					&BlockLabelSpec{
+						Name:  "type",
+						Index: 0,
+					},
+				},
+			},
+			nil,
+			cty.SetVal([]cty.Value{
+				cty.TupleVal([]cty.Value{cty.StringVal("bar"), cty.StringVal("foo")}),
+				cty.TupleVal([]cty.Value{cty.StringVal("baz"), cty.StringVal("bar")}),
+			}),
+			0,
+		},
+		{
+			`
+b {
+	a = true
+}
+b {
+	a = 1
+}
+`,
+			&BlockSetSpec{
+				TypeName: "b",
+				Nested: &AttrSpec{
+					Name: "a",
+					Type: cty.DynamicPseudoType,
+				},
+			},
+			nil,
+			cty.DynamicVal,
+			1, // Inconsistent argument types in b blocks
+		},
+		{
+			`
+b {
+	a = true
+}
+b {
+	a = "not a bool"
+}
+`,
+			&BlockSetSpec{
+				TypeName: "b",
+				Nested: &AttrSpec{
+					Name: "a",
+					Type: cty.DynamicPseudoType,
+				},
+			},
+			nil,
+			cty.SetVal([]cty.Value{
+				cty.StringVal("true"), // type unification generalizes all the values to strings
+				cty.StringVal("not a bool"),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" {}
+b "bar" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{"foo": cty.EmptyObjectVal, "bar": cty.EmptyObjectVal}),
+			0,
+		},
+		{
+			`
+b "foo" "bar" {}
+b "bar" "baz" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{
+				"foo": cty.MapVal(map[string]cty.Value{
+					"bar": cty.EmptyObjectVal,
+				}),
+				"bar": cty.MapVal(map[string]cty.Value{
+					"baz": cty.EmptyObjectVal,
+				}),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" "bar" {}
+b "bar" "bar" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{
+				"foo": cty.MapVal(map[string]cty.Value{
+					"bar": cty.EmptyObjectVal,
+				}),
+				"bar": cty.MapVal(map[string]cty.Value{
+					"bar": cty.EmptyObjectVal,
+				}),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" "bar" {}
+b "foo" "baz" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{
+				"foo": cty.MapVal(map[string]cty.Value{
+					"bar": cty.EmptyObjectVal,
+					"baz": cty.EmptyObjectVal,
+				}),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" "bar" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.MapValEmpty(cty.EmptyObject),
+			1, // too many labels
+		},
+		{
+			`
+b "bar" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.MapValEmpty(cty.EmptyObject),
+			1, // not enough labels
+		},
+		{
+			`
+b "foo" {}
+b "foo" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{"foo": cty.EmptyObjectVal}),
+			1, // duplicate b block
+		},
+		{
+			`
+b "foo" "bar" {}
+b "foo" "bar" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{"foo": cty.MapVal(map[string]cty.Value{"bar": cty.EmptyObjectVal})}),
+			1, // duplicate b block
+		},
+		{
+			`
+b "foo" "bar" {}
+b "bar" "baz" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"type"},
+				Nested: &BlockLabelSpec{
+					Name:  "name",
+					Index: 0,
+				},
+			},
+			nil,
+			cty.MapVal(map[string]cty.Value{
+				"foo": cty.StringVal("bar"),
+				"bar": cty.StringVal("baz"),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" {}
+`,
+			&BlockMapSpec{
+				TypeName:   "b",
+				LabelNames: []string{"type"},
+				Nested: &BlockLabelSpec{
+					Name:  "name",
+					Index: 0,
+				},
+			},
+			nil,
+			cty.MapValEmpty(cty.String),
+			1, // missing name
+		},
+		{
+			`
+b {}
+b {}
+`,
+			&BlockTupleSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+			},
+			nil,
+			cty.TupleVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}),
+			0,
+		},
+		{
+			``,
+			&BlockTupleSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+			},
+			nil,
+			cty.EmptyTupleVal,
+			0,
+		},
+		{
+			`
+b "foo" {}
+b "bar" {}
+`,
+			&BlockTupleSpec{
+				TypeName: "b",
+				Nested: &BlockLabelSpec{
+					Name:  "name",
+					Index: 0,
+				},
+			},
+			nil,
+			cty.TupleVal([]cty.Value{cty.StringVal("foo"), cty.StringVal("bar")}),
+			0,
+		},
+		{
+			`
+b {}
+b {}
+b {}
+`,
+			&BlockTupleSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+				MaxItems: 2,
+			},
+			nil,
+			cty.TupleVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal, cty.EmptyObjectVal}),
+			1, // too many b blocks
+		},
+		{
+			`
+b {}
+b {}
+`,
+			&BlockTupleSpec{
+				TypeName: "b",
+				Nested:   ObjectSpec{},
+				MinItems: 10,
+			},
+			nil,
+			cty.TupleVal([]cty.Value{cty.EmptyObjectVal, cty.EmptyObjectVal}),
+			1, // insufficient b blocks
+		},
+		{
+			`
+b {
+	a = true
+}
+b {
+	a = 1
+}
+`,
+			&BlockTupleSpec{
+				TypeName: "b",
+				Nested: &AttrSpec{
+					Name: "a",
+					Type: cty.DynamicPseudoType,
+				},
+			},
+			nil,
+			cty.TupleVal([]cty.Value{
+				cty.True,
+				cty.NumberIntVal(1),
+			}),
+			0,
+		},
+		{
+			`
+b {
+	a = true
+}
+b {
+	a = "not a bool"
+}
+`,
+			&BlockTupleSpec{
+				TypeName: "b",
+				Nested: &AttrSpec{
+					Name: "a",
+					Type: cty.DynamicPseudoType,
+				},
+			},
+			nil,
+			cty.TupleVal([]cty.Value{
+				cty.True,
+				cty.StringVal("not a bool"),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" {}
+b "bar" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{"foo": cty.EmptyObjectVal, "bar": cty.EmptyObjectVal}),
+			0,
+		},
+		{
+			`
+b "foo" "bar" {}
+b "bar" "baz" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{
+				"foo": cty.ObjectVal(map[string]cty.Value{
+					"bar": cty.EmptyObjectVal,
+				}),
+				"bar": cty.ObjectVal(map[string]cty.Value{
+					"baz": cty.EmptyObjectVal,
+				}),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" "bar" {}
+b "bar" "bar" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{
+				"foo": cty.ObjectVal(map[string]cty.Value{
+					"bar": cty.EmptyObjectVal,
+				}),
+				"bar": cty.ObjectVal(map[string]cty.Value{
+					"bar": cty.EmptyObjectVal,
+				}),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" "bar" {}
+b "foo" "baz" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{
+				"foo": cty.ObjectVal(map[string]cty.Value{
+					"bar": cty.EmptyObjectVal,
+					"baz": cty.EmptyObjectVal,
+				}),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" "bar" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.EmptyObjectVal,
+			1, // too many labels
+		},
+		{
+			`
+b "bar" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.EmptyObjectVal,
+			1, // not enough labels
+		},
+		{
+			`
+b "foo" {}
+b "foo" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{"foo": cty.EmptyObjectVal}),
+			1, // duplicate b block
+		},
+		{
+			`
+b "foo" "bar" {}
+b "foo" "bar" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"key1", "key2"},
+				Nested:     ObjectSpec{},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{"foo": cty.ObjectVal(map[string]cty.Value{"bar": cty.EmptyObjectVal})}),
+			1, // duplicate b block
+		},
+		{
+			`
+b "foo" "bar" {}
+b "bar" "baz" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"type"},
+				Nested: &BlockLabelSpec{
+					Name:  "name",
+					Index: 0,
+				},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{
+				"foo": cty.StringVal("bar"),
+				"bar": cty.StringVal("baz"),
+			}),
+			0,
+		},
+		{
+			`
+b "foo" {}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"type"},
+				Nested: &BlockLabelSpec{
+					Name:  "name",
+					Index: 0,
+				},
+			},
+			nil,
+			cty.EmptyObjectVal,
+			1, // missing name
+		},
+		{
+			`
+b "foo" {
+	arg = true
+}
+b "bar" {
+	arg = 1
+}
+`,
+			&BlockObjectSpec{
+				TypeName:   "b",
+				LabelNames: []string{"type"},
+				Nested: &AttrSpec{
+					Name: "arg",
+					Type: cty.DynamicPseudoType,
+				},
+			},
+			nil,
+			cty.ObjectVal(map[string]cty.Value{
+				"foo": cty.True,
+				"bar": cty.NumberIntVal(1),
+			}),
+			0,
+		},
+	}
+
+	for i, test := range tests {
+		t.Run(fmt.Sprintf("%02d-%s", i, test.config), func(t *testing.T) {
+			file, parseDiags := hclsyntax.ParseConfig([]byte(test.config), "", hcl.Pos{Line: 1, Column: 1, Byte: 0})
+			body := file.Body
+			got, valDiags := Decode(body, test.spec, test.ctx)
+
+			var diags hcl.Diagnostics
+			diags = append(diags, parseDiags...)
+			diags = append(diags, valDiags...)
+
+			if len(diags) != test.diagCount {
+				t.Errorf("wrong number of diagnostics %d; want %d", len(diags), test.diagCount)
+				for _, diag := range diags {
+					t.Logf(" - %s", diag.Error())
+				}
+			}
+
+			if !got.RawEquals(test.want) {
+				t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, test.want)
+			}
+		})
+	}
+}
+
+func TestSourceRange(t *testing.T) {
+	tests := []struct {
+		config string
+		spec   Spec
+		want   hcl.Range
+	}{
+		{
+			"a = 1\n",
+			&AttrSpec{
+				Name: "a",
+			},
+			hcl.Range{
+				Start: hcl.Pos{Line: 1, Column: 5, Byte: 4},
+				End:   hcl.Pos{Line: 1, Column: 6, Byte: 5},
+			},
+		},
+		{
+			`
+b {
+  a = 1
+}
+`,
+			&BlockSpec{
+				TypeName: "b",
+				Nested: &AttrSpec{
+					Name: "a",
+				},
+			},
+			hcl.Range{
+				Start: hcl.Pos{Line: 3, Column: 7, Byte: 11},
+				End:   hcl.Pos{Line: 3, Column: 8, Byte: 12},
+			},
+		},
+		{
+			`
+b {
+  c {
+    a = 1
+  }
+}
+`,
+			&BlockSpec{
+				TypeName: "b",
+				Nested: &BlockSpec{
+					TypeName: "c",
+					Nested: &AttrSpec{
+						Name: "a",
+					},
+				},
+			},
+			hcl.Range{
+				Start: hcl.Pos{Line: 4, Column: 9, Byte: 19},
+				End:   hcl.Pos{Line: 4, Column: 10, Byte: 20},
+			},
+		},
+	}
+
+	for i, test := range tests {
+		t.Run(fmt.Sprintf("%02d-%s", i, test.config), func(t *testing.T) {
+			file, diags := hclsyntax.ParseConfig([]byte(test.config), "", hcl.Pos{Line: 1, Column: 1, Byte: 0})
+			if len(diags) != 0 {
+				t.Errorf("wrong number of diagnostics %d; want %d", len(diags), 0)
+				for _, diag := range diags {
+					t.Logf(" - %s", diag.Error())
+				}
+			}
+			body := file.Body
+
+			got := SourceRange(body, test.spec)
+
+			if !reflect.DeepEqual(got, test.want) {
+				t.Errorf("wrong result\ngot:  %#v\nwant: %#v", got, test.want)
+			}
+		})
+	}
+
+}
diff --git a/hcldec/schema.go b/hcldec/schema.go
new file mode 100644
index 0000000..ddbe7fa
--- /dev/null
+++ b/hcldec/schema.go
@@ -0,0 +1,36 @@
+package hcldec
+
+import (
+	"github.com/hashicorp/hcl/v2"
+)
+
+// ImpliedSchema returns the *hcl.BodySchema implied by the given specification.
+// This is the schema that the Decode function will use internally to
+// access the content of a given body.
+func ImpliedSchema(spec Spec) *hcl.BodySchema {
+	var attrs []hcl.AttributeSchema
+	var blocks []hcl.BlockHeaderSchema
+
+	// visitSameBodyChildren walks through the spec structure, calling
+	// the given callback for each descendent spec encountered. We are
+	// interested in the specs that reference attributes and blocks.
+	var visit visitFunc
+	visit = func(s Spec) {
+		if as, ok := s.(attrSpec); ok {
+			attrs = append(attrs, as.attrSchemata()...)
+		}
+
+		if bs, ok := s.(blockSpec); ok {
+			blocks = append(blocks, bs.blockHeaderSchemata()...)
+		}
+
+		s.visitSameBodyChildren(visit)
+	}
+
+	visit(spec)
+
+	return &hcl.BodySchema{
+		Attributes: attrs,
+		Blocks:     blocks,
+	}
+}