-
Notifications
You must be signed in to change notification settings - Fork 23
/
dsl.ex
302 lines (244 loc) · 9.32 KB
/
dsl.ex
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
defmodule Spark.Dsl do
  # Schema used to validate the options given to `use Spark.Dsl` itself
  # (i.e. the options a DSL *author* passes, not the options an end user
  # of the resulting DSL passes). Validated in `__using__/1` below via
  # `Spark.OptionsHelpers.validate!/2` and interpolated into the moduledoc.
  @using_schema [
    single_extension_kinds: [
      type: {:list, :atom},
      default: [],
      doc:
        "The extension kinds that are allowed to have a single value. For example: `[:data_layer]`"
    ],
    many_extension_kinds: [
      type: {:list, :atom},
      default: [],
      doc:
        "The extension kinds that can have multiple values. e.g `[notifiers: [Notifier1, Notifier2]]`"
    ],
    untyped_extensions?: [
      type: :boolean,
      default: true,
      doc: "Whether or not to support an `extensions` key which contains untyped extensions"
    ],
    default_extensions: [
      type: :keyword_list,
      default: [],
      doc: """
      The extensions that are included by default. e.g `[data_layer: Default, notifiers: [Notifier1]]`
      Default values for single extension kinds are overwritten if specified by the implementor, while many extension
      kinds are appended to if specified by the implementor.
      """
    ]
  ]

  # Convenience struct aliases used in specs throughout the Spark codebase.
  @type entity :: %Spark.Dsl.Entity{}
  @type section :: %Spark.Dsl.Section{}

  # NOTE: the moduledoc is declared *after* @using_schema on purpose, so the
  # schema docs can be interpolated into it at compile time.
  @moduledoc """
  The primary entry point for adding a DSL to a module.
  To add a DSL to a module, add `use Spark.Dsl, ...options`. The options supported with `use Spark.Dsl` are:
  #{Spark.OptionsHelpers.docs(@using_schema)}
  See the callbacks defined in this module to augment the behavior/compilation of the module getting a Dsl.
  ## Schemas/Data Types
  Spark DSLs use a superset of `NimbleOptions` for the `schema` that makes up sections/entities of the DSL.
  For more information, see `Spark.OptionsHelpers`.
  """

  # Options as passed around between the callbacks below.
  @type opts :: Keyword.t()
  # The DSL state/config map (the value stored in `@spark_dsl_config`).
  @type t :: map()

  @doc """
  Validate/add options. Those options will be passed to `handle_opts` and `handle_before_compile`
  """
  @callback init(opts) :: {:ok, opts} | {:error, String.t() | term}
  @doc """
  Handle options in the context of the module. Must return a `quote` block.
  If you want to persist anything in the DSL persistence layer,
  use `@persist {:key, value}`. It can be called multiple times to
  persist multiple times.
  """
  @callback handle_opts(Keyword.t()) :: Macro.t()
  @doc """
  Handle options in the context of the module, after all extensions have been processed. Must return a `quote` block.
  """
  @callback handle_before_compile(Keyword.t()) :: Macro.t()

  # Invoked when a DSL *author* writes `use Spark.Dsl, ...`. Two things happen:
  #
  #   1. At this (outer) level we validate the author's options against
  #      @using_schema and derive `their_opt_schema` — the option schema that
  #      *users* of the authored DSL will be validated against.
  #   2. The quoted block injected into the author's module registers the
  #      persisted `:spark_*` attributes, provides overridable default
  #      implementations of the `init/1`, `handle_opts/1` and
  #      `handle_before_compile/1` callbacks, and defines a *nested*
  #      `__using__/1` macro so that `use TheAuthoredDsl, ...` works in turn.
  defmacro __using__(opts) do
    opts = Spark.OptionsHelpers.validate!(opts, @using_schema)

    # Each single-kind extension becomes an `:atom` option (with its default
    # taken from `default_extensions`); each many-kind extension becomes a
    # `{:list, :atom}` option defaulting to `[]`.
    their_opt_schema =
      Enum.map(opts[:single_extension_kinds], fn extension_kind ->
        {extension_kind, type: :atom, default: opts[:default_extensions][extension_kind]}
      end) ++
        Enum.map(opts[:many_extension_kinds], fn extension_kind ->
          {extension_kind, type: {:list, :atom}, default: []}
        end)

    # Optionally allow a catch-all `extensions: [...]` key for untyped extensions.
    their_opt_schema =
      if opts[:untyped_extensions?] do
        Keyword.put(their_opt_schema, :extensions, type: {:list, :atom})
      else
        their_opt_schema
      end

    # `:otp_app` is always accepted; when given it is persisted into the DSL
    # state (see the nested `__using__` below).
    their_opt_schema = Keyword.put(their_opt_schema, :otp_app, type: :atom)

    quote bind_quoted: [
            their_opt_schema: their_opt_schema,
            parent_opts: opts,
            parent: __CALLER__.module
          ],
          generated: true do
      require Spark.Dsl.Extension

      # The default handle_opts/handle_before_compile bodies are empty quote
      # blocks; silence dialyzer warnings about them.
      @dialyzer {:nowarn_function, handle_opts: 1, handle_before_compile: 1}

      # `persist: true` makes these attributes readable at runtime via
      # `module.__info__(:attributes)` — this is how `spark_dsl?`-style
      # introspection elsewhere in Spark discovers DSL modules.
      Module.register_attribute(__MODULE__, :spark_dsl, persist: true)
      Module.register_attribute(__MODULE__, :spark_default_extensions, persist: true)
      Module.register_attribute(__MODULE__, :spark_extension_kinds, persist: true)
      @spark_dsl true
      @spark_default_extensions parent_opts[:default_extensions]
                                |> Keyword.values()
                                |> List.flatten()
      @spark_extension_kinds List.wrap(parent_opts[:many_extension_kinds]) ++
                               List.wrap(parent_opts[:single_extension_kinds])

      # Overridable defaults for the Spark.Dsl callbacks (see defoverridable below).
      def init(opts), do: {:ok, opts}

      def default_extensions, do: @spark_default_extensions

      def handle_opts(opts) do
        quote do
        end
      end

      def handle_before_compile(opts) do
        quote do
        end
      end

      defoverridable init: 1, handle_opts: 1, handle_before_compile: 1

      # Nested macro: invoked when an *end user* writes `use TheAuthoredDsl, ...`.
      # This runs in the context of the DSL-authoring module, with `parent`,
      # `parent_opts` and `their_opt_schema` baked in from the outer quote.
      defmacro __using__(opts) do
        parent = unquote(parent)
        parent_opts = unquote(parent_opts)
        their_opt_schema = unquote(their_opt_schema)

        require Spark.Dsl.Extension

        # Merge the author-declared default extensions into the user's opts:
        #   - single kinds: user's value wins, default only fills a nil
        #   - many kinds (and `:extensions`): user's values are appended to
        #     the defaults
        # then expand aliases to real module atoms and collect every module
        # that implements Spark.Dsl.Extension.
        {opts, extensions} =
          parent_opts[:default_extensions]
          |> Enum.reduce(opts, fn {key, defaults}, opts ->
            Keyword.update(opts, key, defaults, fn current_value ->
              cond do
                key in parent_opts[:single_extension_kinds] ->
                  current_value || defaults

                key in parent_opts[:many_extension_kinds] || key == :extensions ->
                  List.wrap(current_value) ++ List.wrap(defaults)

                true ->
                  current_value
              end
            end)
          end)
          |> Spark.Dsl.expand_modules(parent_opts, __CALLER__)

        # Validate against the derived schema, then give the DSL author a
        # chance to transform the opts via their (possibly overridden) init/1.
        # `unwrap/1` raises if init/1 returned {:error, _}.
        opts =
          opts
          |> Spark.OptionsHelpers.validate!(their_opt_schema)
          |> init()
          |> Spark.Dsl.unwrap()

        body =
          quote generated: true do
            parent = unquote(parent)
            opts = unquote(opts)
            parent_opts = unquote(parent_opts)
            their_opt_schema = unquote(their_opt_schema)

            @opts opts
            # Spark.Dsl.__before_compile__/1 (defined below) finalizes the DSL
            # state just before the user's module is compiled.
            @before_compile Spark.Dsl
            @after_compile __MODULE__
            @spark_is parent
            @spark_parent parent

            def spark_is, do: @spark_is

            # After compilation, run every extension transformer that asked to
            # run post-compile (after_compile?/0), in dependency-sorted order.
            defmacro __after_compile__(_, _) do
              quote do
                transformers_to_run =
                  @extensions
                  |> Enum.flat_map(& &1.transformers())
                  |> Spark.Dsl.Transformer.sort()
                  |> Enum.filter(& &1.after_compile?())

                __MODULE__
                |> Spark.Dsl.Extension.run_transformers(
                  transformers_to_run,
                  @spark_dsl_config,
                  __ENV__
                )
              end
            end

            # `@persist` accumulates `{key, value}` pairs that
            # Spark.Dsl.Extension.set_state/1 later stores in the DSL state.
            Module.register_attribute(__MODULE__, :persist, accumulate: true)

            # The author's handle_opts/1 returns a quoted block; evaluate it
            # now, in the user's module, so it can add `@persist` entries etc.
            opts
            |> @spark_parent.handle_opts()
            |> Code.eval_quoted([], __ENV__)

            if opts[:otp_app] do
              @persist {:otp_app, opts[:otp_app]}
            end

            @persist {:module, __MODULE__}

            # Persist each extension-kind option both into the DSL state and
            # as a plain module attribute on the user's module.
            for single_extension_kind <- parent_opts[:single_extension_kinds] do
              @persist {single_extension_kind, opts[single_extension_kind]}
              Module.put_attribute(__MODULE__, single_extension_kind, opts[single_extension_kind])
            end

            for many_extension_kind <- parent_opts[:many_extension_kinds] do
              @persist {many_extension_kind, opts[many_extension_kind] || []}

              Module.put_attribute(
                __MODULE__,
                many_extension_kind,
                opts[many_extension_kind] || []
              )
            end
          end

        # Extension preparations (section macros etc.) are injected after the
        # body so the DSL's top-level section macros become available.
        preparations = Spark.Dsl.Extension.prepare(extensions)
        [body | preparations]
      end
    end
  end

  # Unwraps the result of a DSL author's init/1 callback.
  # NOTE(review): `raise(error)` raises a RuntimeError when `error` is a
  # binary, re-raises when it is an exception struct, and fails with
  # ArgumentError for any other term — presumably init/1 errors are always
  # strings or exceptions; confirm against callers.
  @doc false
  def unwrap({:ok, value}), do: value
  def unwrap({:error, error}), do: raise(error)

  # Walks the user's opts, expanding module aliases (via Macro.expand/2 in the
  # caller's env) for every extension-kind key, and collects the modules that
  # implement the Spark.Dsl.Extension behaviour. Returns `{opts, extensions}`.
  #
  # NOTE(review): despite the parameter name, callers pass the *parent opts*
  # (the options given to `use Spark.Dsl`) as `their_opt_schema`, not the
  # derived option schema — only the `[:single_extension_kinds]` /
  # `[:many_extension_kinds]` keys are read, so it works, but the name is
  # misleading.
  @doc false
  def expand_modules(opts, their_opt_schema, env) do
    Enum.reduce(opts, {[], []}, fn {key, value}, {opts, extensions} ->
      cond do
        key in their_opt_schema[:single_extension_kinds] ->
          mod = Macro.expand(value, env)

          extensions =
            if Spark.implements_behaviour?(mod, Spark.Dsl.Extension) do
              [mod | extensions]
            else
              extensions
            end

          {Keyword.put(opts, key, mod), extensions}

        key in their_opt_schema[:many_extension_kinds] || key == :extensions ->
          mods =
            value
            |> List.wrap()
            |> Enum.map(&Macro.expand(&1, env))

          extensions =
            extensions ++
              Enum.filter(mods, &Spark.implements_behaviour?(&1, Spark.Dsl.Extension))

          {Keyword.put(opts, key, mods), extensions}

        true ->
          {Keyword.put(opts, key, value), extensions}
      end
    end)
  end

  # Runs in the *user's* module just before compilation (registered via
  # `@before_compile Spark.Dsl` above). Finalizes the DSL state, evaluates any
  # deferred `eval` blocks stored in the config, exposes `spark_dsl_config/0`,
  # appends the generated DSL documentation to the module's own moduledoc, and
  # finally splices in the author's handle_before_compile/1 code.
  defmacro __before_compile__(env) do
    parent = Module.get_attribute(env.module, :spark_parent)
    opts = Module.get_attribute(env.module, :opts)
    parent_code = parent.handle_before_compile(opts)

    code =
      quote generated: true, bind_quoted: [dsl: __MODULE__] do
        require Spark.Dsl.Extension
        Module.register_attribute(__MODULE__, :spark_is, persist: true)
        Module.put_attribute(__MODULE__, :spark_is, @spark_is)
        # Folds every accumulated @persist pair into @spark_dsl_config.
        Spark.Dsl.Extension.set_state(@persist)

        # Extensions/transformers may stash quoted code (with bindings) under
        # the config's `:eval` key; run it now in the user's module.
        for {block, bindings} <- @spark_dsl_config[:eval] || [] do
          Code.eval_quoted(block, bindings, __ENV__)
        end

        # Guarantees the module defines at least one function, so the compiler
        # accepts it even when the DSL generated nothing else.
        def __spark_placeholder__, do: nil

        def spark_dsl_config do
          @spark_dsl_config
        end

        # Append the auto-generated DSL docs to any moduledoc the user wrote;
        # otherwise the generated docs become the moduledoc.
        if @moduledoc do
          @moduledoc """
          #{@moduledoc}
          #{Spark.Dsl.Extension.explain(@extensions, @spark_dsl_config)}
          """
        else
          @moduledoc Spark.Dsl.Extension.explain(@extensions, @spark_dsl_config)
        end
      end

    [code, parent_code]
  end

  @doc """
  Returns `true` if `module` is a DSL module built from `type`
  (i.e. `module` was defined with `use type, ...`), `false` otherwise.
  """
  # NOTE(review): the broad `rescue _` also swallows unrelated errors from a
  # user-defined spark_is/0, not just UndefinedFunctionError for non-DSL
  # modules — presumably intentional (best-effort check), but worth confirming.
  def is?(module, type) when is_atom(module) do
    module.spark_is() == type
  rescue
    _ -> false
  end

  def is?(_module, _type), do: false
end