Phases 1-7: Complete CljElixir compiler through Malli schema adapter
Bootstrap compiler (reader, analyzer, transformer, compiler, Mix plugin), core protocols (16 protocols for Map/List/Tuple/BitString), PersistentVector (bit-partitioned trie), domain tools (clojurify/elixirify), BEAM concurrency (receive, spawn, GenServer), control flow & macros (threading, try/catch, destructuring, defmacro with quasiquote/auto-gensym), and Malli schema adapter (m/=> specs, auto @type, recursive schemas, cross-references). 537 compiler tests + 55 Malli unit tests + 15 integration tests = 607 total. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
@@ -0,0 +1,71 @@
defmodule CljElixir do
|
||||
@moduledoc "CljElixir: Clojure-syntax language for the BEAM"
|
||||
|
||||
@doc """
|
||||
Compile a CljElixir source string to Elixir AST.
|
||||
|
||||
Returns `{:ok, elixir_ast}` on success, or `{:error, diagnostics}` on failure.
|
||||
|
||||
## Options
|
||||
|
||||
* `:file` - the file path for error reporting (default: `"nofile"`)
|
||||
|
||||
## Examples
|
||||
|
||||
iex> CljElixir.compile_string("(+ 1 2)")
|
||||
{:ok, {:+, [line: 1], [1, 2]}}
|
||||
|
||||
"""
|
||||
@spec compile_string(String.t(), keyword()) :: {:ok, term()} | {:error, list()}
|
||||
def compile_string(source, opts \\ []) do
|
||||
CljElixir.Compiler.compile_string(source, opts)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Compile a `.clje` file to Elixir AST.
|
||||
|
||||
Reads the file and delegates to `compile_string/2` with the `:file` option set.
|
||||
|
||||
Returns `{:ok, elixir_ast}` on success, or `{:error, diagnostics}` on failure.
|
||||
If the file cannot be read, the failure is reported as a diagnostic in the `{:error, diagnostics}` tuple.
|
||||
"""
|
||||
@spec compile_file(Path.t(), keyword()) :: {:ok, term()} | {:error, list()}
|
||||
def compile_file(path, opts \\ []) do
|
||||
CljElixir.Compiler.compile_file(path, opts)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Compile and evaluate a CljElixir source string.
|
||||
|
||||
Returns `{:ok, result, bindings}` on success, or `{:error, diagnostics}` on failure.
|
||||
|
||||
## Options
|
||||
|
||||
* `:file` - the file path for error reporting (default: `"nofile"`)
|
||||
* `:bindings` - variable bindings for evaluation (default: `[]`)
|
||||
* `:env` - the macro environment for evaluation (default: `__ENV__`)
|
||||
|
||||
## Examples
|
||||
|
||||
iex> CljElixir.eval_string("(+ 1 2)")
|
||||
{:ok, 3, []}
|
||||
|
||||
"""
|
||||
@spec eval_string(String.t(), keyword()) :: {:ok, term(), keyword()} | {:error, list()}
|
||||
def eval_string(source, opts \\ []) do
|
||||
CljElixir.Compiler.eval_string(source, opts)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Compile and evaluate a `.clje` file.
|
||||
|
||||
Reads the file and delegates to `eval_string/2` with the `:file` option set.
|
||||
|
||||
Returns `{:ok, result, bindings}` on success, or `{:error, diagnostics}` on failure.
|
||||
If the file cannot be read, the failure is reported as a diagnostic in the `{:error, diagnostics}` tuple.
|
||||
"""
|
||||
@spec eval_file(Path.t(), keyword()) :: {:ok, term(), keyword()} | {:error, list()}
|
||||
def eval_file(path, opts \\ []) do
|
||||
CljElixir.Compiler.eval_file(path, opts)
|
||||
end
|
||||
end
|
||||
@@ -0,0 +1,626 @@
|
||||
defmodule CljElixir.Analyzer do
|
||||
@moduledoc """
|
||||
AST analyzer and validator for CljElixir.
|
||||
|
||||
Performs lightweight static analysis on CljElixir AST forms (output of the Reader)
|
||||
before they are passed to the Transformer. Catches common structural errors early
|
||||
with clear diagnostic messages.
|
||||
|
||||
## Validations
|
||||
|
||||
1. **Special form arity** - `defmodule` needs name + body, `let` needs a vector
|
||||
with an even number of binding pairs, `if` needs 2-3 args, `case` needs a
|
||||
subject + even pattern/body pairs, `cond` needs even pairs, `loop` needs a
|
||||
vector with even binding pairs.
|
||||
|
||||
2. **Map literal validation** - Maps must have an even number of forms (key-value pairs).
|
||||
|
||||
3. **`recur` position** - `recur` must appear in tail position. In `if`/`case`/`cond`,
|
||||
the tail position is the last expression of each branch. In `let`/`do`, the tail
|
||||
position is the last expression.
|
||||
|
||||
4. **Nested `recur`** - `recur` inside a nested `loop` should only refer to the
|
||||
innermost loop, not an outer one.
|
||||
|
||||
## Return Value
|
||||
|
||||
Returns `{:ok, forms}` when the AST is valid (passes forms through unchanged),
|
||||
or `{:error, diagnostics}` when errors are found.
|
||||
|
||||
Diagnostics are maps with keys: `:severity`, `:message`, `:line`, `:col`.
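
## Example

A minimal sketch of analyzing a hand-built Reader form (here, a map literal
with an odd number of forms); the tuple shape mirrors the Reader's output:

    CljElixir.Analyzer.analyze([{:map, [line: 1, col: 3], [:a]}])
    #=> {:error,
    #     [%{severity: :error,
    #        message: "map literal requires an even number of forms (key-value pairs), got 1",
    #        line: 1,
    #        col: 3}]}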
|
||||
"""
|
||||
|
||||
@type diagnostic :: %{
|
||||
severity: :error | :warning,
|
||||
message: String.t(),
|
||||
line: non_neg_integer(),
|
||||
col: non_neg_integer()
|
||||
}
|
||||
|
||||
@doc """
|
||||
Analyze and validate a list of CljElixir AST forms.
|
||||
|
||||
Returns `{:ok, forms}` if all validations pass, or `{:error, diagnostics}`
|
||||
with a list of diagnostic maps describing the errors found.
|
||||
"""
|
||||
@spec analyze(list()) :: {:ok, list()} | {:error, [diagnostic()]}
|
||||
def analyze(forms) when is_list(forms) do
|
||||
diagnostics =
|
||||
forms
|
||||
|> Enum.flat_map(fn form -> validate_form(form, %{tail: true, in_loop: false, in_fn: false}) end)
|
||||
|
||||
case Enum.filter(diagnostics, &(&1.severity == :error)) do
|
||||
[] -> {:ok, forms}
|
||||
_errors -> {:error, diagnostics}
|
||||
end
|
||||
end
|
||||
|
||||
def analyze(form) do
|
||||
analyze(List.wrap(form))
|
||||
end
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Form validation - dispatches on the head of each s-expression
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# A list form starting with an atom is an s-expression: (special-form ...)
|
||||
defp validate_form({:list, meta, [{:symbol, _, "defmodule"} | args]}, ctx) do
|
||||
validate_defmodule(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "defn"} | args]}, ctx) do
|
||||
validate_defn(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "defn-"} | args]}, ctx) do
|
||||
validate_defn(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "fn"} | args]}, ctx) do
|
||||
validate_fn(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "let"} | args]}, ctx) do
|
||||
validate_let(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "if"} | args]}, ctx) do
|
||||
validate_if(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "case"} | args]}, ctx) do
|
||||
validate_case(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "cond"} | args]}, ctx) do
|
||||
validate_cond(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "loop"} | args]}, ctx) do
|
||||
validate_loop(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "recur"} | _args]}, ctx) do
|
||||
validate_recur(meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:list, meta, [{:symbol, _, "do"} | args]}, ctx) do
|
||||
validate_do(args, meta, ctx)
|
||||
end
|
||||
|
||||
defp validate_form({:map, meta, elements}, ctx) do
|
||||
validate_map_literal(elements, meta, ctx)
|
||||
end
|
||||
|
||||
# Generic list form: validate children
|
||||
defp validate_form({:list, _meta, children}, ctx) when is_list(children) do
|
||||
# For recur analysis, none of the arguments to a generic call are in tail
# position: the call itself may be in tail position, but its arguments are not.
|
||||
non_tail_ctx = %{ctx | tail: false}
|
||||
|
||||
Enum.flat_map(children, fn child ->
|
||||
validate_form(child, non_tail_ctx)
|
||||
end)
|
||||
end
|
||||
|
||||
# Vectors: validate elements
|
||||
defp validate_form({:vector, _meta, elements}, ctx) when is_list(elements) do
|
||||
non_tail_ctx = %{ctx | tail: false}
|
||||
Enum.flat_map(elements, fn el -> validate_form(el, non_tail_ctx) end)
|
||||
end
|
||||
|
||||
# Sets: validate elements
|
||||
defp validate_form({:set, _meta, elements}, ctx) when is_list(elements) do
|
||||
non_tail_ctx = %{ctx | tail: false}
|
||||
Enum.flat_map(elements, fn el -> validate_form(el, non_tail_ctx) end)
|
||||
end
|
||||
|
||||
# Atoms, numbers, strings, symbols, keywords — always valid
|
||||
defp validate_form(_leaf, _ctx), do: []
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Special form validators
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
defp validate_defmodule(args, meta, ctx) do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
case args do
|
||||
[] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "defmodule requires a module name and at least one body expression",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
[_name] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "defmodule requires at least one body expression after the module name",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
[_name | body] ->
|
||||
# Body forms are each in tail position within the module (top-level forms)
|
||||
Enum.flat_map(body, fn form ->
|
||||
validate_form(form, %{ctx | tail: true, in_loop: false, in_fn: false})
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_defn(args, meta, ctx) do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
case args do
|
||||
[] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "defn requires a function name, parameter vector, and body",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
[_name] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "defn requires a parameter vector and body after the function name",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
[_name, maybe_doc | rest] ->
|
||||
# Could be: (defn name [params] body...)
|
||||
# or: (defn name "docstring" [params] body...)
|
||||
# or: (defn name ([params1] body1) ([params2] body2)) -- multi-arity
|
||||
fn_ctx = %{ctx | tail: true, in_fn: true, in_loop: false}
|
||||
|
||||
case maybe_doc do
|
||||
# Multi-arity: (defn name (clause1) (clause2) ...)
|
||||
{:list, _, _} ->
|
||||
clauses = [maybe_doc | rest]
|
||||
Enum.flat_map(clauses, fn clause -> validate_fn_clause(clause, fn_ctx) end)
|
||||
|
||||
# Docstring form: (defn name "doc" ...)
|
||||
{:string, _, _} ->
|
||||
validate_defn_body(rest, fn_ctx, line, col)
|
||||
|
||||
# Single arity with param vector: (defn name [params] body...)
|
||||
{:vector, _, _} ->
|
||||
validate_fn_body(rest, fn_ctx)
|
||||
|
||||
_ ->
|
||||
validate_fn_body(rest, fn_ctx)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_defn_body(rest, ctx, line, col) do
|
||||
case rest do
|
||||
[] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "defn requires a parameter vector and body after docstring",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
[{:vector, _, _} | body] ->
|
||||
validate_fn_body(body, ctx)
|
||||
|
||||
[{:list, _, _} | _] = clauses ->
|
||||
# Multi-arity after docstring
|
||||
Enum.flat_map(clauses, fn clause -> validate_fn_clause(clause, ctx) end)
|
||||
|
||||
_ ->
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_fn(args, meta, ctx) do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
fn_ctx = %{ctx | tail: true, in_fn: true, in_loop: false}
|
||||
|
||||
case args do
|
||||
[] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "fn requires a parameter vector and body",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
# (fn [params] body...) - single arity
|
||||
[{:vector, _, _} | body] ->
|
||||
validate_fn_body(body, fn_ctx)
|
||||
|
||||
# (fn name [params] body...) - named fn
|
||||
[{:symbol, _, _}, {:vector, _, _} | body] ->
|
||||
validate_fn_body(body, fn_ctx)
|
||||
|
||||
# (fn (clause1) (clause2) ...) - multi-arity
|
||||
[{:list, _, _} | _] = clauses ->
|
||||
Enum.flat_map(clauses, fn clause -> validate_fn_clause(clause, fn_ctx) end)
|
||||
|
||||
# (fn name (clause1) (clause2) ...) - named multi-arity
|
||||
[{:symbol, _, _} | [{:list, _, _} | _] = clauses] ->
|
||||
Enum.flat_map(clauses, fn clause -> validate_fn_clause(clause, fn_ctx) end)
|
||||
|
||||
_ ->
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_fn_clause({:list, _meta, [{:vector, _, _} | body]}, ctx) do
|
||||
validate_fn_body(body, ctx)
|
||||
end
|
||||
|
||||
defp validate_fn_clause(_other, _ctx), do: []
|
||||
|
||||
defp validate_fn_body([], _ctx), do: []
|
||||
|
||||
defp validate_fn_body(body, ctx) do
|
||||
{leading, [last]} = Enum.split(body, -1)
|
||||
non_tail = %{ctx | tail: false}
|
||||
|
||||
leading_diags = Enum.flat_map(leading, fn form -> validate_form(form, non_tail) end)
|
||||
last_diags = validate_form(last, ctx)
|
||||
leading_diags ++ last_diags
|
||||
end
|
||||
|
||||
defp validate_let(args, meta, ctx) do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
case args do
|
||||
[] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "let requires a binding vector and body",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
[{:vector, vmeta, bindings} | body] ->
|
||||
binding_diags = validate_binding_vector(bindings, vmeta, "let")
|
||||
|
||||
body_diags =
|
||||
case body do
|
||||
[] ->
|
||||
[
|
||||
%{
|
||||
severity: :warning,
|
||||
message: "let with no body expression always returns nil",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
_ ->
|
||||
validate_body_forms(body, ctx)
|
||||
end
|
||||
|
||||
binding_diags ++ body_diags
|
||||
|
||||
_ ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "let requires a binding vector as its first argument",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_if(args, meta, ctx) do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
case length(args) do
|
||||
n when n < 2 ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "if requires a condition and at least a then branch (got #{n} argument(s))",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
n when n > 3 ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "if accepts at most 3 arguments (condition, then, else), got #{n}",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
2 ->
|
||||
[condition, then_branch] = args
|
||||
non_tail = %{ctx | tail: false}
|
||||
|
||||
validate_form(condition, non_tail) ++
|
||||
validate_form(then_branch, ctx)
|
||||
|
||||
3 ->
|
||||
[condition, then_branch, else_branch] = args
|
||||
non_tail = %{ctx | tail: false}
|
||||
|
||||
validate_form(condition, non_tail) ++
|
||||
validate_form(then_branch, ctx) ++
|
||||
validate_form(else_branch, ctx)
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_case(args, meta, ctx) do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
case args do
|
||||
[] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "case requires a subject expression and at least one pattern/body pair",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
[_subject] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "case requires at least one pattern/body pair after the subject",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
[subject | pairs] ->
|
||||
non_tail = %{ctx | tail: false}
|
||||
subject_diags = validate_form(subject, non_tail)
|
||||
|
||||
pair_diags =
|
||||
if rem(length(pairs), 2) != 0 do
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message:
|
||||
"case requires an even number of pattern/body forms, got #{length(pairs)}",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
else
|
||||
pairs
|
||||
|> Enum.chunk_every(2)
|
||||
|> Enum.flat_map(fn
|
||||
[_pattern, body] ->
|
||||
validate_form(body, ctx)
|
||||
|
||||
_ ->
|
||||
[]
|
||||
end)
|
||||
end
|
||||
|
||||
subject_diags ++ pair_diags
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_cond(args, meta, ctx) do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
if rem(length(args), 2) != 0 do
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "cond requires an even number of test/expression pairs, got #{length(args)}",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
else
|
||||
non_tail = %{ctx | tail: false}
|
||||
|
||||
args
|
||||
|> Enum.chunk_every(2)
|
||||
|> Enum.flat_map(fn
|
||||
[test, body] ->
|
||||
validate_form(test, non_tail) ++ validate_form(body, ctx)
|
||||
|
||||
_ ->
|
||||
[]
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_loop(args, meta, ctx) do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
case args do
|
||||
[] ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "loop requires a binding vector and body",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
[{:vector, vmeta, bindings} | body] ->
|
||||
binding_diags = validate_binding_vector(bindings, vmeta, "loop")
|
||||
|
||||
body_diags =
|
||||
case body do
|
||||
[] ->
|
||||
[
|
||||
%{
|
||||
severity: :warning,
|
||||
message: "loop with no body expression always returns nil",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
_ ->
|
||||
loop_ctx = %{ctx | tail: true, in_loop: true}
|
||||
validate_body_forms(body, loop_ctx)
|
||||
end
|
||||
|
||||
binding_diags ++ body_diags
|
||||
|
||||
_ ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "loop requires a binding vector as its first argument",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_recur(meta, ctx) do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
cond do
|
||||
not ctx.tail ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "recur must be in tail position",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
not (ctx.in_loop or ctx.in_fn) ->
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "recur must be inside a loop or function body",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
|
||||
true ->
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_do(args, _meta, ctx) do
|
||||
validate_body_forms(args, ctx)
|
||||
end
|
||||
|
||||
defp validate_map_literal(elements, meta, _ctx) do
|
||||
if rem(length(elements), 2) != 0 do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message:
|
||||
"map literal requires an even number of forms (key-value pairs), got #{length(elements)}",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
else
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
defp validate_binding_vector(bindings, meta, form_name) do
|
||||
if rem(length(bindings), 2) != 0 do
|
||||
line = meta_line(meta)
|
||||
col = meta_col(meta)
|
||||
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message:
|
||||
"#{form_name} binding vector requires an even number of forms (name/value pairs), got #{length(bindings)}",
|
||||
line: line,
|
||||
col: col
|
||||
}
|
||||
]
|
||||
else
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_body_forms([], _ctx), do: []
|
||||
|
||||
defp validate_body_forms(forms, ctx) do
|
||||
{leading, [last]} = Enum.split(forms, -1)
|
||||
non_tail = %{ctx | tail: false}
|
||||
|
||||
leading_diags = Enum.flat_map(leading, fn form -> validate_form(form, non_tail) end)
|
||||
last_diags = validate_form(last, ctx)
|
||||
leading_diags ++ last_diags
|
||||
end
|
||||
|
||||
defp meta_line(meta) when is_map(meta), do: Map.get(meta, :line, 0)
|
||||
defp meta_line(meta) when is_list(meta), do: Keyword.get(meta, :line, 0)
|
||||
defp meta_line(_), do: 0
|
||||
|
||||
defp meta_col(meta) when is_map(meta), do: Map.get(meta, :col, 0)
|
||||
defp meta_col(meta) when is_list(meta), do: Keyword.get(meta, :col, 0)
|
||||
defp meta_col(_), do: 0
|
||||
end
|
||||
@@ -0,0 +1,312 @@
|
||||
defmodule CljElixir.Compiler do
|
||||
@moduledoc """
|
||||
Orchestrates the CljElixir compilation pipeline.
|
||||
|
||||
Chains: Reader -> Analyzer -> Transformer -> Elixir compilation.
|
||||
|
||||
The pipeline:
|
||||
|
||||
1. **Reader** (`CljElixir.Reader`) - Parses source text into CljElixir AST
|
||||
(s-expression forms represented as Elixir terms).
|
||||
2. **Analyzer** (`CljElixir.Analyzer`) - Validates the AST, checking special form
|
||||
arity, map literal structure, recur position, etc.
|
||||
3. **Transformer** (`CljElixir.Transformer`) - Converts CljElixir AST into Elixir
|
||||
AST (`{operation, metadata, arguments}` tuples).
|
||||
4. **Elixir Compiler** - `Code.eval_quoted/3` or `Code.compile_quoted/2` handles
|
||||
macro expansion, protocol consolidation, and BEAM bytecode generation.
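
## Example

A minimal sketch of driving the pipeline end to end (results shown are
illustrative):

    {:ok, ast} = CljElixir.Compiler.compile_string("(+ 1 2)")
    {:ok, 3, _bindings} = CljElixir.Compiler.eval_string("(+ 1 2)")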
|
||||
"""
|
||||
|
||||
@doc """
|
||||
Compile a CljElixir source string to Elixir AST.
|
||||
|
||||
Runs the full pipeline: read -> analyze -> transform.
|
||||
|
||||
Returns `{:ok, elixir_ast}` on success, or `{:error, diagnostics}` on failure.
|
||||
|
||||
## Options
|
||||
|
||||
* `:file` - the source file path for error reporting (default: `"nofile"`)
|
||||
"""
|
||||
@spec compile_string(String.t(), keyword()) :: {:ok, term()} | {:error, list()}
|
||||
def compile_string(source, opts \\ []) do
|
||||
file = opts[:file] || "nofile"
|
||||
|
||||
with {:ok, forms} <- read(source, file),
|
||||
{:ok, forms} <- analyze(forms, opts),
|
||||
{:ok, elixir_ast} <- transform(forms, opts) do
|
||||
{:ok, elixir_ast}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Compile a `.clje` file to Elixir AST.
|
||||
|
||||
Reads the file from disk and delegates to `compile_string/2` with the
|
||||
`:file` option set automatically.
|
||||
|
||||
Returns `{:ok, elixir_ast}` on success, or `{:error, diagnostics}` on failure.
|
||||
If the file cannot be read, the failure is reported as a diagnostic in the `{:error, diagnostics}` tuple.
|
||||
"""
|
||||
@spec compile_file(Path.t(), keyword()) :: {:ok, term()} | {:error, list()}
|
||||
def compile_file(path, opts \\ []) do
|
||||
case File.read(path) do
|
||||
{:ok, source} ->
|
||||
compile_string(source, Keyword.put(opts, :file, path))
|
||||
|
||||
{:error, reason} ->
|
||||
{:error,
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "could not read file #{path}: #{:file.format_error(reason)}",
|
||||
file: path,
|
||||
line: 0,
|
||||
col: 0
|
||||
}
|
||||
]}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Compile and evaluate a CljElixir source string.
|
||||
|
||||
Compiles the source to Elixir AST and evaluates it via `Code.eval_quoted/3`.
|
||||
|
||||
Returns `{:ok, result, bindings}` on success, or `{:error, diagnostics}` on failure.
|
||||
|
||||
## Options
|
||||
|
||||
* `:file` - the source file path for error reporting (default: `"nofile"`)
|
||||
* `:bindings` - variable bindings for evaluation (default: `[]`)
|
||||
* `:env` - the macro environment for evaluation
|
||||
"""
|
||||
@spec eval_string(String.t(), keyword()) :: {:ok, term(), keyword()} | {:error, list()}
|
||||
def eval_string(source, opts \\ []) do
|
||||
with {:ok, ast} <- compile_string(source, opts) do
|
||||
try do
|
||||
bindings = opts[:bindings] || []
|
||||
env_opts = build_eval_opts(opts)
|
||||
{result, new_bindings} = Code.eval_quoted(ast, bindings, env_opts)
|
||||
{:ok, result, new_bindings}
|
||||
rescue
|
||||
e ->
|
||||
file = opts[:file] || "nofile"
|
||||
|
||||
{:error,
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: format_eval_error(e),
|
||||
file: file,
|
||||
line: extract_line(e),
|
||||
col: 0
|
||||
}
|
||||
]}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Compile and evaluate a `.clje` file.
|
||||
|
||||
Reads the file from disk and delegates to `eval_string/2`.
|
||||
|
||||
Returns `{:ok, result, bindings}` on success, or `{:error, diagnostics}` on failure.
|
||||
If the file cannot be read, the failure is reported as a diagnostic in the `{:error, diagnostics}` tuple.
|
||||
"""
|
||||
@spec eval_file(Path.t(), keyword()) :: {:ok, term(), keyword()} | {:error, list()}
|
||||
def eval_file(path, opts \\ []) do
|
||||
case File.read(path) do
|
||||
{:ok, source} ->
|
||||
eval_string(source, Keyword.put(opts, :file, path))
|
||||
|
||||
{:error, reason} ->
|
||||
{:error,
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "could not read file #{path}: #{:file.format_error(reason)}",
|
||||
file: path,
|
||||
line: 0,
|
||||
col: 0
|
||||
}
|
||||
]}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Compile a CljElixir source string to in-memory BEAM modules.
|
||||
|
||||
Compiles the source to Elixir AST and then uses `Code.compile_quoted/2` to
|
||||
produce BEAM bytecode modules.
|
||||
|
||||
Returns `{:ok, [{module, binary}]}` on success, or `{:error, diagnostics}` on failure.
|
||||
|
||||
## Options
|
||||
|
||||
* `:file` - the source file path for error reporting (default: `"nofile"`)
|
||||
"""
|
||||
@spec compile_to_beam(String.t(), keyword()) ::
|
||||
{:ok, [{module(), binary()}]} | {:error, list()}
|
||||
def compile_to_beam(source, opts \\ []) do
|
||||
with {:ok, ast} <- compile_string(source, opts) do
|
||||
try do
|
||||
file = opts[:file] || "nofile"
|
||||
modules = Code.compile_quoted(ast, file)
|
||||
{:ok, modules}
|
||||
rescue
|
||||
e ->
|
||||
file = opts[:file] || "nofile"
|
||||
|
||||
{:error,
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "BEAM compilation failed: #{format_eval_error(e)}",
|
||||
file: file,
|
||||
line: extract_line(e),
|
||||
col: 0
|
||||
}
|
||||
]}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Compile a `.clje` file to BEAM modules, optionally writing `.beam` files to the
`:output_dir` directory.
|
||||
|
||||
Returns `{:ok, [{module, binary}]}` on success, or `{:error, diagnostics}` on failure.
|
||||
|
||||
## Options
|
||||
|
||||
* `:output_dir` - directory to write `.beam` files to (if omitted, no files are written)
|
||||
"""
|
||||
@spec compile_file_to_beam(Path.t(), keyword()) ::
|
||||
{:ok, [{module(), binary()}]} | {:error, list()}
|
||||
def compile_file_to_beam(path, opts \\ []) do
|
||||
case File.read(path) do
|
||||
{:ok, source} ->
|
||||
opts = Keyword.put(opts, :file, path)
|
||||
|
||||
with {:ok, modules} <- compile_to_beam(source, opts) do
|
||||
case opts[:output_dir] do
|
||||
nil ->
|
||||
{:ok, modules}
|
||||
|
||||
output_dir ->
|
||||
write_beam_files(modules, output_dir)
|
||||
end
|
||||
end
|
||||
|
||||
{:error, reason} ->
|
||||
{:error,
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "could not read file #{path}: #{:file.format_error(reason)}",
|
||||
file: path,
|
||||
line: 0,
|
||||
col: 0
|
||||
}
|
||||
]}
|
||||
end
|
||||
end
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Private helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
defp read(source, file) do
|
||||
case CljElixir.Reader.read_string(source) do
|
||||
{:ok, forms} ->
|
||||
{:ok, forms}
|
||||
|
||||
{:error, reason} when is_binary(reason) ->
|
||||
{:error,
|
||||
[%{severity: :error, message: "read error: #{reason}", file: file, line: 0, col: 0}]}
|
||||
|
||||
{:error, reason} ->
|
||||
{:error,
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "read error: #{inspect(reason)}",
|
||||
file: file,
|
||||
line: 0,
|
||||
col: 0
|
||||
}
|
||||
]}
|
||||
end
|
||||
end
|
||||
|
||||
defp analyze(forms, _opts) do
|
||||
case CljElixir.Analyzer.analyze(forms) do
|
||||
{:ok, analyzed_forms} ->
|
||||
{:ok, analyzed_forms}
|
||||
|
||||
{:error, diagnostics} when is_list(diagnostics) ->
|
||||
{:error, diagnostics}
|
||||
|
||||
{:error, reason} ->
|
||||
{:error,
|
||||
[%{severity: :error, message: "analysis error: #{inspect(reason)}", line: 0, col: 0}]}
|
||||
end
|
||||
end
|
||||
|
||||
defp transform(forms, opts) do
|
||||
try do
|
||||
ctx =
|
||||
if opts[:vector_as_list] do
|
||||
%CljElixir.Transformer.Context{vector_as_list: true}
|
||||
else
|
||||
%CljElixir.Transformer.Context{}
|
||||
end
|
||||
|
||||
elixir_ast = CljElixir.Transformer.transform(forms, ctx)
|
||||
{:ok, elixir_ast}
|
||||
rescue
|
||||
e ->
|
||||
{:error,
|
||||
[
|
||||
%{
|
||||
severity: :error,
|
||||
message: "transform error: #{format_eval_error(e)}",
|
||||
line: 0,
|
||||
col: 0
|
||||
}
|
||||
]}
|
||||
end
|
||||
end
|
||||
|
||||
defp build_eval_opts(opts) do
|
||||
eval_opts = [file: opts[:file] || "nofile"]
|
||||
|
||||
case opts[:env] do
|
||||
nil -> eval_opts
|
||||
env -> Keyword.put(eval_opts, :env, env)
|
||||
end
|
||||
end
|
||||
|
||||
defp format_eval_error(%{__struct__: struct} = e) when is_atom(struct) do
|
||||
Exception.message(e)
|
||||
rescue
|
||||
_ -> inspect(e)
|
||||
end
|
||||
|
||||
defp format_eval_error(e), do: inspect(e)
|
||||
|
||||
defp extract_line(%{line: line}) when is_integer(line), do: line
|
||||
defp extract_line(_), do: 0
|
||||
|
||||
defp write_beam_files(modules, output_dir) do
|
||||
File.mkdir_p!(output_dir)
|
||||
|
||||
Enum.each(modules, fn {module, binary} ->
|
||||
beam_path = Path.join(output_dir, "#{module}.beam")
|
||||
File.write!(beam_path, binary)
|
||||
end)
|
||||
|
||||
{:ok, modules}
|
||||
end
|
||||
end
|
||||
@@ -0,0 +1,112 @@
|
||||
defmodule CljElixir.Equality do
|
||||
@moduledoc """
|
||||
Cross-type equality for CljElixir.
|
||||
|
||||
Handles the case where `(= [1 2 3] '(1 2 3))` should return true —
|
||||
PersistentVector and list are both sequential types with the same elements.
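
A minimal sketch, assuming a `CljElixir.PersistentVector.new/1` constructor
(not part of this diff):

    vec = CljElixir.PersistentVector.new([1, 2, 3])
    CljElixir.Equality.equiv(vec, [1, 2, 3])
    #=> true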
|
||||
"""
|
||||
|
||||
def equiv(a, b) when a === b, do: true
|
||||
|
||||
def equiv(%CljElixir.PersistentVector{} = a, %CljElixir.PersistentVector{} = b) do
|
||||
a.cnt == b.cnt and CljElixir.PersistentVector.to_list(a) == CljElixir.PersistentVector.to_list(b)
|
||||
end
|
||||
|
||||
def equiv(%CljElixir.PersistentVector{} = a, b) when is_list(b) do
|
||||
CljElixir.PersistentVector.to_list(a) == b
|
||||
end
|
||||
|
||||
def equiv(a, %CljElixir.PersistentVector{} = b) when is_list(a) do
|
||||
a == CljElixir.PersistentVector.to_list(b)
|
||||
end
|
||||
|
||||
def equiv(%CljElixir.SubVector{} = a, b) do
|
||||
CljElixir.SubVector.sv_to_list(a) |> equiv_list(b)
|
||||
end
|
||||
|
||||
def equiv(a, %CljElixir.SubVector{} = b) do
|
||||
equiv_list(CljElixir.SubVector.sv_to_list(b), a)
|
||||
end
|
||||
|
||||
def equiv(a, b), do: a == b
|
||||
|
||||
defp equiv_list(list, other) when is_list(other), do: list == other
|
||||
|
||||
defp equiv_list(list, %CljElixir.PersistentVector{} = pv) do
|
||||
list == CljElixir.PersistentVector.to_list(pv)
|
||||
end
|
||||
|
||||
defp equiv_list(_, _), do: false
|
||||
end
|
||||
|
||||
defimpl Enumerable, for: CljElixir.PersistentVector do
|
||||
def count(pv), do: {:ok, pv.cnt}
|
||||
|
||||
def member?(_pv, _value), do: {:error, __MODULE__}
|
||||
|
||||
def reduce(_pv, {:halt, acc}, _fun), do: {:halted, acc}
|
||||
def reduce(pv, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(pv, &1, fun)}
|
||||
|
||||
def reduce(pv, {:cont, acc}, fun) do
|
||||
list = CljElixir.PersistentVector.to_list(pv)
|
||||
Enumerable.List.reduce(list, {:cont, acc}, fun)
|
||||
end
|
||||
|
||||
def slice(pv) do
|
||||
size = pv.cnt
|
||||
{:ok, size, &slice_fun(pv, &1, &2, &3)}
|
||||
end
|
||||
|
||||
defp slice_fun(pv, start, length, step) do
|
||||
start..(start + (length - 1) * step)//step
|
||||
|> Enum.map(fn i -> CljElixir.PersistentVector.pv_nth(pv, i) end)
|
||||
end
|
||||
end
|
||||
|
||||
defimpl Collectable, for: CljElixir.PersistentVector do
|
||||
def into(pv) do
|
||||
collector_fun = fn
|
||||
acc, {:cont, elem} -> CljElixir.PersistentVector.pv_conj(acc, elem)
|
||||
acc, :done -> acc
|
||||
_acc, :halt -> :ok
|
||||
end
|
||||
|
||||
{pv, collector_fun}
|
||||
end
|
||||
end
|
||||
|
||||
defimpl Enumerable, for: Tuple do
|
||||
def count(t), do: {:ok, tuple_size(t)}
|
||||
|
||||
def member?(_t, _value), do: {:error, __MODULE__}
|
||||
|
||||
def reduce(_t, {:halt, acc}, _fun), do: {:halted, acc}
|
||||
def reduce(t, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(t, &1, fun)}
|
||||
|
||||
def reduce(t, {:cont, acc}, fun) do
|
||||
list = Tuple.to_list(t)
|
||||
Enumerable.List.reduce(list, {:cont, acc}, fun)
|
||||
end
|
||||
|
||||
def slice(t) do
|
||||
size = tuple_size(t)
|
||||
{:ok, size, &slice_fun(t, &1, &2, &3)}
|
||||
end
|
||||
|
||||
defp slice_fun(t, start, length, step) do
|
||||
start..(start + (length - 1) * step)//step
|
||||
|> Enum.map(fn i -> elem(t, i) end)
|
||||
end
|
||||
end
|
||||
|
||||
defimpl Collectable, for: Tuple do
|
||||
def into(t) do
|
||||
collector_fun = fn
|
||||
acc, {:cont, elem} -> :erlang.append_element(acc, elem)
|
||||
acc, :done -> acc
|
||||
_acc, :halt -> :ok
|
||||
end
|
||||
|
||||
{t, collector_fun}
|
||||
end
|
||||
end
|
||||
@@ -0,0 +1,368 @@
|
||||
defmodule CljElixir.Malli do
|
||||
@moduledoc """
|
||||
Converts Malli-style schema data to Elixir typespec AST.
|
||||
|
||||
Takes plain Elixir terms (atoms, lists, maps) representing Malli schemas
|
||||
and produces Elixir AST nodes suitable for `@spec` and `@type` attributes.
|
||||
|
||||
## Public API
|
||||
|
||||
* `spec_ast/2,3` - Generate `@spec` AST nodes from a function schema
|
||||
* `type_ast/2,3` - Generate `@type` AST nodes from a type schema
|
||||
* `schema_to_typespec/2` - Convert a schema to its typespec AST (the type part only)
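
## Example

A small sketch of the schema-to-typespec mapping (the AST shown is what
`schema_to_typespec/2` returns for these inputs):

    CljElixir.Malli.schema_to_typespec(:int)
    #=> {:integer, [], []}

    CljElixir.Malli.schema_to_typespec([:maybe, :int])
    #=> {:|, [], [{:integer, [], []}, nil]}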
|
||||
"""
|
||||
|
||||
# Atoms that need quoted syntax in source but are valid at runtime
|
||||
@arrow :"=>"
|
||||
@optional_marker :"?"
|
||||
|
||||
# ── Public API ──────────────────────────────────────────────────────
|
||||
|
||||
@doc """
|
||||
Generates a list of `@spec` AST nodes for the given function name and schema.
|
||||
|
||||
`schema` is either `[:=> ...]` for a single-arity function or
|
||||
`[:function ...]` for a multi-arity function.
|
||||
|
||||
Returns a list because `:function` schemas and optional params
|
||||
can produce multiple `@spec` entries.
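
## Example

A sketch for a simple two-argument function schema; the single returned node
corresponds to `@spec add(integer(), integer()) :: integer()`:

    CljElixir.Malli.spec_ast(:add, [:"=>", [:cat, :int, :int], :int])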
|
||||
"""
|
||||
@spec spec_ast(atom(), list(), keyword()) :: list()
|
||||
def spec_ast(fun_name, schema, opts \\ [])
|
||||
|
||||
def spec_ast(fun_name, [:function | clauses], opts) do
|
||||
Enum.flat_map(clauses, fn clause -> spec_ast(fun_name, clause, opts) end)
|
||||
end
|
||||
|
||||
def spec_ast(fun_name, [@arrow, [:cat | param_schemas], return_schema], opts) do
|
||||
ret_ast = schema_to_typespec(return_schema, opts)
|
||||
param_groups = expand_optional_params(param_schemas)
|
||||
|
||||
Enum.map(param_groups, fn params ->
|
||||
param_asts = Enum.map(params, &schema_to_typespec(&1, opts))
|
||||
wrap_spec(fun_name, param_asts, ret_ast)
|
||||
end)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Generates a `@type` AST node for the given type name and schema.
|
||||
|
||||
Accepts an optional `opts` keyword list with `:known_types` for cross-references.
|
||||
For schemas with a `:registry` key, generates multiple types from the registry.
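
## Example

A sketch for a plain (non-registry) schema; the result corresponds to
`@type tags :: [String.t()]`:

    CljElixir.Malli.type_ast(:tags, [:list, :string])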
|
||||
"""
|
||||
@spec type_ast(atom(), list(), keyword()) :: tuple() | list()
|
||||
def type_ast(type_name, schema, opts \\ [])
|
||||
|
||||
def type_ast(type_name, [:schema, %{registry: registry}, ref_schema], opts) do
|
||||
type_ast_registry(type_name, [:schema, %{registry: registry}, ref_schema], registry, opts)
|
||||
end
|
||||
|
||||
def type_ast(type_name, schema, opts) do
|
||||
type_body = schema_to_typespec(schema, opts)
|
||||
wrap_type(type_name, type_body)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Generates a list of `@type` AST nodes, one for each entry in the registry.
|
||||
|
||||
`registry_types` is a map of `{name_atom => schema}` pairs or a list of
|
||||
`{name_atom, schema}` tuples.
|
||||
"""
|
||||
def type_ast_registry(_type_name, [:schema, %{registry: _}, _ref], registry_types, opts) when is_map(registry_types) do
|
||||
Enum.map(registry_types, fn {name, schema} ->
|
||||
clean_name = clean_registry_name(name)
|
||||
body = schema_to_typespec(schema, Keyword.put(opts, :registry, registry_types))
|
||||
wrap_type(clean_name, body)
|
||||
end)
|
||||
end
|
||||
|
||||
def type_ast_registry(_type_name, [:schema, %{registry: _}, _ref], registry_types, opts) when is_list(registry_types) do
|
||||
Enum.map(registry_types, fn {name, schema} ->
|
||||
clean_name = clean_registry_name(name)
|
||||
body = schema_to_typespec(schema, Keyword.put(opts, :registry, Map.new(registry_types)))
|
||||
wrap_type(clean_name, body)
|
||||
end)
|
||||
end
|
||||
|
||||
# ── schema_to_typespec ──────────────────────────────────────────────
|
||||
|
||||
@doc """
|
||||
Converts a schema to its typespec AST representation (the type part,
|
||||
not the `@type` wrapper).
|
||||
|
||||
## Options
|
||||
|
||||
* `:known_types` - map of `%{"User" => :user, ...}` for cross-schema references
|
||||
* `:registry` - map of registry types for resolving `:ref` references
|
||||
"""
|
||||
@spec schema_to_typespec(term(), keyword()) :: term()
|
||||
def schema_to_typespec(schema, opts \\ [])
|
||||
|
||||
# ── Primitives ──────────────────────────────────────────────────────
|
||||
|
||||
def schema_to_typespec(:string, _opts), do: string_t_ast()
|
||||
def schema_to_typespec(:int, _opts), do: {:integer, [], []}
|
||||
def schema_to_typespec(:integer, _opts), do: {:integer, [], []}
|
||||
def schema_to_typespec(:float, _opts), do: {:float, [], []}
|
||||
def schema_to_typespec(:number, _opts), do: {:number, [], []}
|
||||
def schema_to_typespec(:boolean, _opts), do: {:boolean, [], []}
|
||||
def schema_to_typespec(:atom, _opts), do: {:atom, [], []}
|
||||
def schema_to_typespec(:keyword, _opts), do: {:atom, [], []}
|
||||
def schema_to_typespec(:any, _opts), do: {:any, [], []}
|
||||
def schema_to_typespec(:nil, _opts), do: nil
|
||||
def schema_to_typespec(:pid, _opts), do: {:pid, [], []}
|
||||
def schema_to_typespec(:port, _opts), do: {:port, [], []}
|
||||
def schema_to_typespec(:reference, _opts), do: {:reference, [], []}
|
||||
def schema_to_typespec(:"pos-int", _opts), do: {:pos_integer, [], []}
|
||||
def schema_to_typespec(:"neg-int", _opts), do: {:neg_integer, [], []}
|
||||
def schema_to_typespec(:"nat-int", _opts), do: {:non_neg_integer, [], []}
|
||||
|
||||
# ── Schema references (string keys) ────────────────────────────────
|
||||
|
||||
def schema_to_typespec(name, opts) when is_binary(name) do
|
||||
known = Keyword.get(opts, :known_types, %{})
|
||||
|
||||
case Map.fetch(known, name) do
|
||||
{:ok, type_name} -> {type_name, [], []}
|
||||
:error -> {:any, [], []}
|
||||
end
|
||||
end
|
||||
|
||||
# ── Literal values (atoms that aren't schema keywords) ──────────────
|
||||
|
||||
def schema_to_typespec(atom, _opts) when is_atom(atom), do: atom
|
||||
def schema_to_typespec(int, _opts) when is_integer(int), do: int
|
||||
|
||||
# ── Compound and container types (list schemas) ─────────────────────
|
||||
|
||||
def schema_to_typespec([head | _rest] = schema, opts) do
|
||||
convert_list_schema(head, schema, opts)
|
||||
end
|
||||
|
||||
# ── Fallback ───────────────────────────────────────────────────────
|
||||
|
||||
def schema_to_typespec(_, _opts), do: {:any, [], []}
|
||||
|
||||
# ── List schema dispatch ────────────────────────────────────────────
|
||||
|
||||
defp convert_list_schema(:or, [_ | types], opts) do
|
||||
type_asts = Enum.map(types, &schema_to_typespec(&1, opts))
|
||||
right_assoc_union(type_asts)
|
||||
end
|
||||
|
||||
defp convert_list_schema(:and, [_ | schemas], opts) do
|
||||
resolve_and_type(schemas, opts)
|
||||
end
|
||||
|
||||
defp convert_list_schema(:maybe, [:maybe, schema], opts) do
|
||||
inner = schema_to_typespec(schema, opts)
|
||||
{:|, [], [inner, nil]}
|
||||
end
|
||||
|
||||
defp convert_list_schema(:enum, [_ | values], _opts) do
|
||||
right_assoc_union(values)
|
||||
end
|
||||
|
||||
defp convert_list_schema(:=, [:=, value], _opts), do: value
|
||||
|
||||
defp convert_list_schema(:map, [_ | field_specs], opts) do
|
||||
fields =
|
||||
Enum.map(field_specs, fn
|
||||
[name, {:optional, true}, schema] ->
|
||||
{name, schema_to_typespec(schema, opts)}
|
||||
|
||||
[name, schema] ->
|
||||
{name, schema_to_typespec(schema, opts)}
|
||||
end)
|
||||
|
||||
{:%{}, [], fields}
|
||||
end
|
||||
|
||||
defp convert_list_schema(:"map-of", [_, key_schema, val_schema], opts) do
|
||||
key_ast = schema_to_typespec(key_schema, opts)
|
||||
val_ast = schema_to_typespec(val_schema, opts)
|
||||
{:%{}, [], [{{:optional, [], [key_ast]}, val_ast}]}
|
||||
end
|
||||
|
||||
defp convert_list_schema(:list, [:list, elem_schema], opts) do
|
||||
[schema_to_typespec(elem_schema, opts)]
|
||||
end
|
||||
|
||||
defp convert_list_schema(:vector, _schema, _opts) do
|
||||
persistent_vector_t_ast()
|
||||
end
|
||||
|
||||
defp convert_list_schema(:set, _schema, _opts) do
|
||||
mapset_t_ast()
|
||||
end
|
||||
|
||||
defp convert_list_schema(:tuple, [_ | elem_schemas], opts) do
|
||||
elems = Enum.map(elem_schemas, &schema_to_typespec(&1, opts))
|
||||
{:{}, [], elems}
|
||||
end
|
||||
|
||||
defp convert_list_schema(:ref, [:ref, name], opts) do
|
||||
clean = clean_registry_name(name)
|
||||
registry = Keyword.get(opts, :registry, %{})
|
||||
|
||||
if Map.has_key?(registry, name) or Map.has_key?(registry, clean) do
|
||||
{clean, [], []}
|
||||
else
|
||||
known = Keyword.get(opts, :known_types, %{})
|
||||
|
||||
case Map.fetch(known, name) do
|
||||
{:ok, type_name} -> {type_name, [], []}
|
||||
:error -> {clean, [], []}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp convert_list_schema(:schema, [:schema, %{registry: registry}, ref_schema], opts) do
|
||||
merged_opts = Keyword.put(opts, :registry, registry)
|
||||
schema_to_typespec(ref_schema, merged_opts)
|
||||
end
|
||||
|
||||
defp convert_list_schema(:>, _, _opts), do: {:any, [], []}
|
||||
defp convert_list_schema(:>=, _, _opts), do: {:any, [], []}
|
||||
defp convert_list_schema(:<, _, _opts), do: {:any, [], []}
|
||||
defp convert_list_schema(:<=, _, _opts), do: {:any, [], []}
|
||||
|
||||
defp convert_list_schema(head, schema, opts) when head == @arrow do
|
||||
[@arrow, [:cat | params], ret] = schema
|
||||
param_asts = Enum.map(params, &schema_to_typespec(&1, opts))
|
||||
ret_ast = schema_to_typespec(ret, opts)
|
||||
[{:->, [], [param_asts, ret_ast]}]
|
||||
end
|
||||
|
||||
defp convert_list_schema(_, _, _opts), do: {:any, [], []}
|
||||
|
||||
# ── Private helpers ─────────────────────────────────────────────────
|
||||
|
||||
defp string_t_ast do
|
||||
{{:., [], [{:__aliases__, [alias: false], [:String]}, :t]}, [], []}
|
||||
end
|
||||
|
||||
defp mapset_t_ast do
|
||||
{{:., [], [{:__aliases__, [alias: false], [:MapSet]}, :t]}, [], []}
|
||||
end
|
||||
|
||||
defp persistent_vector_t_ast do
|
||||
{{:., [], [{:__aliases__, [alias: false], [:CljElixir, :PersistentVector]}, :t]}, [], []}
|
||||
end
|
||||
|
||||
defp right_assoc_union([single]), do: single
|
||||
|
||||
defp right_assoc_union([first | rest]) do
|
||||
{:|, [], [first, right_assoc_union(rest)]}
|
||||
end
|
||||
|
||||
defp wrap_spec(fun_name, param_asts, ret_ast) do
|
||||
{:@, [], [
|
||||
{:spec, [], [
|
||||
{:"::", [], [
|
||||
{fun_name, [], param_asts},
|
||||
ret_ast
|
||||
]}
|
||||
]}
|
||||
]}
|
||||
end
|
||||
|
||||
defp wrap_type(type_name, body_ast) do
|
||||
{:@, [], [
|
||||
{:type, [], [
|
||||
{:"::", [], [
|
||||
{type_name, [], []},
|
||||
body_ast
|
||||
]}
|
||||
]}
|
||||
]}
|
||||
end
|
||||
|
||||
defp clean_registry_name(name) when is_atom(name) do
|
||||
name_str = Atom.to_string(name)
|
||||
|
||||
cleaned =
|
||||
name_str
|
||||
|> String.replace(~r/^(Elixir\.CljElixir\.|::)/, "")
|
||||
|
||||
String.to_atom(cleaned)
|
||||
end
|
||||
|
||||
defp clean_registry_name(name), do: name
|
||||
|
||||
# Expand optional params into all param combinations.
|
||||
# E.g., [:string, [:"?", :string], [:"?", :int]] produces:
|
||||
# [[:string], [:string, :string], [:string, :string, :int]]
|
||||
defp expand_optional_params(param_schemas) do
|
||||
{required, optionals} = split_required_optional(param_schemas)
|
||||
|
||||
for n <- 0..length(optionals) do
|
||||
required ++ Enum.take(optionals, n)
|
||||
end
|
||||
end
|
||||
|
||||
defp split_required_optional(params) do
|
||||
split_required_optional(params, [])
|
||||
end
|
||||
|
||||
defp split_required_optional([[@optional_marker, schema] | rest], req_acc) do
|
||||
optionals = [schema | extract_optionals(rest)]
|
||||
{Enum.reverse(req_acc), optionals}
|
||||
end
|
||||
|
||||
defp split_required_optional([param | rest], req_acc) do
|
||||
split_required_optional(rest, [param | req_acc])
|
||||
end
|
||||
|
||||
defp split_required_optional([], req_acc) do
|
||||
{Enum.reverse(req_acc), []}
|
||||
end
|
||||
|
||||
defp extract_optionals([[@optional_marker, schema] | rest]) do
|
||||
[schema | extract_optionals(rest)]
|
||||
end
|
||||
|
||||
defp extract_optionals([_ | rest]), do: extract_optionals(rest)
|
||||
defp extract_optionals([]), do: []
|
||||
|
||||
# Resolve :and types — extract most specific expressible type.
|
||||
# Special cases: [:and :int [:> 0]] -> pos_integer()
|
||||
# [:and :int [:>= 0]] -> non_neg_integer()
|
||||
defp resolve_and_type(schemas, opts) do
|
||||
base_types = Enum.filter(schemas, &recognized_schema?/1)
|
||||
constraints = Enum.filter(schemas, &constraint?/1)
|
||||
|
||||
case {base_types, constraints} do
|
||||
{[:int], [[:>, 0]]} -> {:pos_integer, [], []}
|
||||
{[:integer], [[:>, 0]]} -> {:pos_integer, [], []}
|
||||
{[:int], [[:>=, 0]]} -> {:non_neg_integer, [], []}
|
||||
{[:integer], [[:>=, 0]]} -> {:non_neg_integer, [], []}
|
||||
{[base | _], _} -> schema_to_typespec(base, opts)
|
||||
{[], _} -> {:any, [], []}
|
||||
end
|
||||
end
|
||||
|
||||
@primitive_types [
|
||||
:string, :int, :integer, :float, :number, :boolean, :atom, :keyword,
|
||||
:any, :nil, :pid, :port, :reference, :"pos-int", :"neg-int", :"nat-int"
|
||||
]
|
||||
|
||||
@compound_heads [:or, :and, :maybe, :enum, :=, :map, :"map-of",
|
||||
:list, :vector, :set, :tuple, :ref, :schema]
|
||||
|
||||
defp recognized_schema?(schema) when is_atom(schema) do
|
||||
schema in @primitive_types
|
||||
end
|
||||
|
||||
defp recognized_schema?([head | _]) when is_atom(head) do
|
||||
head in @compound_heads or head == @arrow
|
||||
end
|
||||
|
||||
defp recognized_schema?(_), do: false
|
||||
|
||||
defp constraint?([:>, _]), do: true
|
||||
defp constraint?([:>=, _]), do: true
|
||||
defp constraint?([:<, _]), do: true
|
||||
defp constraint?([:<=, _]), do: true
|
||||
defp constraint?(_), do: false
|
||||
end
|
||||
@@ -0,0 +1,647 @@
|
||||
defmodule CljElixir.Reader do
|
||||
@moduledoc """
|
||||
Reader for CljElixir: tokenizes source text and parses it into CljElixir AST.
|
||||
|
||||
The reader has two phases:
|
||||
1. Tokenizer — converts source text into a flat list of tokens
|
||||
2. Parser — recursive descent over the token list, producing CljElixir AST nodes
|
||||
|
||||
## AST representation
|
||||
|
||||
Literals are represented as themselves: integers, floats, strings, booleans, nil, and atoms (keywords).
|
||||
|
||||
Compound forms use tagged tuples:
|
||||
{:symbol, meta, name}
|
||||
{:list, meta, [elements]}
|
||||
{:vector, meta, [elements]}
|
||||
{:map, meta, [k1, v1, k2, v2, ...]}
|
||||
{:set, meta, [elements]}
|
||||
{:tuple, meta, [elements]}
|
||||
{:regex, meta, pattern}
|
||||
{:quote, meta, form}
|
||||
{:with_meta, meta, {metadata, target}}
|
||||
{:anon_fn, meta, body}
|
||||
{:quasiquote, meta, form}
|
||||
{:unquote, meta, form}
|
||||
{:splice_unquote, meta, form}
|
||||
{:deref, meta, form}
|
||||
"""
|
||||
|
||||
alias CljElixir.Reader.Token
|
||||
|
||||
# ── Public API ──────────────────────────────────────────────────────
|
||||
|
||||
@doc """
|
||||
Read a string of CljElixir source into a list of AST forms.
|
||||
|
||||
Returns `{:ok, [form]}` on success, `{:error, message}` on failure.
|
||||
"""
|
||||
@spec read_string(String.t()) :: {:ok, list()} | {:error, String.t()}
|
||||
def read_string(source) when is_binary(source) do
|
||||
case tokenize(source) do
|
||||
{:ok, tokens} ->
|
||||
parse_all(tokens, [])
|
||||
|
||||
{:error, _} = err ->
|
||||
err
|
||||
end
|
||||
end
|
||||
|
||||
# ════════════════════════════════════════════════════════════════════
|
||||
# TOKENIZER
|
||||
# ════════════════════════════════════════════════════════════════════
|
||||
|
||||
@doc false
|
||||
def tokenize(source) do
|
||||
chars = String.to_charlist(source)
|
||||
tokenize_loop(chars, 1, 1, [])
|
||||
end
|
||||
|
||||
# ---------- end of input ----------
|
||||
defp tokenize_loop([], _line, _col, acc), do: {:ok, Enum.reverse(acc)}
|
||||
|
||||
# ---------- newline ----------
|
||||
defp tokenize_loop([?\n | rest], line, _col, acc),
|
||||
do: tokenize_loop(rest, line + 1, 1, acc)
|
||||
|
||||
defp tokenize_loop([?\r, ?\n | rest], line, _col, acc),
|
||||
do: tokenize_loop(rest, line + 1, 1, acc)
|
||||
|
||||
defp tokenize_loop([?\r | rest], line, _col, acc),
|
||||
do: tokenize_loop(rest, line + 1, 1, acc)
|
||||
|
||||
# ---------- whitespace / commas ----------
|
||||
defp tokenize_loop([c | rest], line, col, acc) when c in [?\s, ?\t, ?,],
|
||||
do: tokenize_loop(rest, line, col + 1, acc)
|
||||
|
||||
# ---------- comments ----------
|
||||
defp tokenize_loop([?; | rest], line, _col, acc) do
|
||||
rest = skip_comment(rest)
|
||||
# skip_comment stops at (but does not consume) the newline or EOF.
|
||||
# Let the main loop's newline handler increment line/col.
|
||||
tokenize_loop(rest, line, 1, acc)
|
||||
end
|
||||
|
||||
# ---------- strings ----------
|
||||
defp tokenize_loop([?" | rest], line, col, acc) do
|
||||
case read_string_literal(rest, line, col + 1, []) do
|
||||
{:ok, value, rest2, end_line, end_col} ->
|
||||
token = %Token{type: :string, value: value, line: line, col: col}
|
||||
tokenize_loop(rest2, end_line, end_col, [token | acc])
|
||||
|
||||
{:error, msg} ->
|
||||
{:error, msg}
|
||||
end
|
||||
end
|
||||
|
||||
# ---------- dispatch sequences: #{ #el[ #( #" ----------
|
||||
defp tokenize_loop([?#, ?e, ?l, ?[ | rest], line, col, acc) do
|
||||
token = %Token{type: :hash_el_lbracket, value: "#el[", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 4, [token | acc])
|
||||
end
|
||||
|
||||
defp tokenize_loop([?#, ?{ | rest], line, col, acc) do
|
||||
token = %Token{type: :hash_lbrace, value: "\#{", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 2, [token | acc])
|
||||
end
|
||||
|
||||
defp tokenize_loop([?#, ?( | rest], line, col, acc) do
|
||||
token = %Token{type: :hash_lparen, value: "#(", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 2, [token | acc])
|
||||
end
|
||||
|
||||
defp tokenize_loop([?#, ?" | rest], line, col, acc) do
|
||||
case read_string_literal(rest, line, col + 2, []) do
|
||||
{:ok, value, rest2, end_line, end_col} ->
|
||||
token = %Token{type: :hash_string, value: value, line: line, col: col}
|
||||
tokenize_loop(rest2, end_line, end_col, [token | acc])
|
||||
|
||||
{:error, msg} ->
|
||||
{:error, msg}
|
||||
end
|
||||
end
|
||||
|
||||
# ---------- splice-unquote ~@ (must come before unquote ~) ----------
|
||||
defp tokenize_loop([?~, ?@ | rest], line, col, acc) do
|
||||
token = %Token{type: :splice_unquote, value: "~@", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 2, [token | acc])
|
||||
end
|
||||
|
||||
# ---------- unquote ~ ----------
|
||||
defp tokenize_loop([?~ | rest], line, col, acc) do
|
||||
token = %Token{type: :unquote, value: "~", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
# ---------- delimiters ----------
|
||||
defp tokenize_loop([?( | rest], line, col, acc) do
|
||||
token = %Token{type: :lparen, value: "(", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
defp tokenize_loop([?) | rest], line, col, acc) do
|
||||
token = %Token{type: :rparen, value: ")", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
defp tokenize_loop([?[ | rest], line, col, acc) do
|
||||
token = %Token{type: :lbracket, value: "[", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
defp tokenize_loop([?] | rest], line, col, acc) do
|
||||
token = %Token{type: :rbracket, value: "]", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
defp tokenize_loop([?{ | rest], line, col, acc) do
|
||||
token = %Token{type: :lbrace, value: "{", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
defp tokenize_loop([?} | rest], line, col, acc) do
|
||||
token = %Token{type: :rbrace, value: "}", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
# ---------- quote ' ----------
|
||||
defp tokenize_loop([?' | rest], line, col, acc) do
|
||||
token = %Token{type: :quote, value: "'", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
# ---------- quasiquote ` ----------
|
||||
defp tokenize_loop([?` | rest], line, col, acc) do
|
||||
token = %Token{type: :quasiquote, value: "`", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
# ---------- metadata ^ ----------
|
||||
defp tokenize_loop([?^ | rest], line, col, acc) do
|
||||
token = %Token{type: :meta, value: "^", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
# ---------- deref @ ----------
|
||||
defp tokenize_loop([?@ | rest], line, col, acc) do
|
||||
token = %Token{type: :deref, value: "@", line: line, col: col}
|
||||
tokenize_loop(rest, line, col + 1, [token | acc])
|
||||
end
|
||||
|
||||
# ---------- keywords ----------
|
||||
defp tokenize_loop([?: | rest], line, col, acc) do
|
||||
case read_keyword(rest, line, col) do
|
||||
{:ok, kw_value, rest2, end_col} ->
|
||||
token = %Token{type: :keyword, value: kw_value, line: line, col: col}
|
||||
tokenize_loop(rest2, line, end_col, [token | acc])
|
||||
|
||||
{:error, msg} ->
|
||||
{:error, msg}
|
||||
end
|
||||
end
|
||||
|
||||
# ---------- negative numbers: -<digit> ----------
|
||||
# Since whitespace is always consumed before reaching tokenize_loop,
|
||||
# a standalone `-` followed by a digit is always a negative number literal.
|
||||
# The `-` inside symbol names (like `my-func`) is consumed by the symbol reader
|
||||
# and never reaches this clause as a standalone character.
|
||||
defp tokenize_loop([?- | rest], line, col, acc) do
|
||||
if starts_with_digit?(rest) do
|
||||
{:ok, token, rest2, end_col} = read_number(rest, line, col + 1, [?-])
|
||||
token = %{token | line: line, col: col}
|
||||
tokenize_loop(rest2, line, end_col, [token | acc])
|
||||
else
|
||||
# It's a symbol starting with -
|
||||
case read_symbol([?- | rest], line, col) do
|
||||
{:ok, token, rest2, end_col} ->
|
||||
tokenize_loop(rest2, line, end_col, [token | acc])
|
||||
end
|
||||
end
|
||||
end

  # ---------- numbers ----------
  defp tokenize_loop([c | _] = chars, line, col, acc) when c in ?0..?9 do
    {:ok, token, rest2, end_col} = read_number(chars, line, col, [])
    tokenize_loop(rest2, line, end_col, [token | acc])
  end

  # ---------- symbols (and true/false/nil) ----------
  defp tokenize_loop([c | _] = chars, line, col, acc)
       when c in ?a..?z or c in ?A..?Z or
              c == ?_ or c == ?* or c == ?! or c == ?? or
              c == ?< or c == ?> or c == ?= or c == ?+ or
              c == ?. or c == ?& or c == ?% do
    case read_symbol(chars, line, col) do
      {:ok, token, rest, end_col} ->
        tokenize_loop(rest, line, end_col, [token | acc])
    end
  end

  # ---------- catch-all: unexpected character ----------
  defp tokenize_loop([c | _], line, col, _acc) do
    {:error, "Unexpected character '#{<<c::utf8>>}' at line #{line}, col #{col}"}
  end

  # ── Tokenizer helpers ───────────────────────────────────────────────

  # Characters that can continue a symbol (after the start)
  defp symbol_continue_char?(c) when c in ?a..?z, do: true
  defp symbol_continue_char?(c) when c in ?A..?Z, do: true
  defp symbol_continue_char?(c) when c in ?0..?9, do: true
  defp symbol_continue_char?(c) when c in [?_, ?*, ?!, ??, ?<, ?>, ?=, ?+, ?-, ?/, ?., ?%, ?&, ?#], do: true
  defp symbol_continue_char?(_), do: false

  defp starts_with_digit?([c | _]) when c in ?0..?9, do: true
  defp starts_with_digit?(_), do: false

  defp skip_comment([?\n | _] = rest), do: rest
  defp skip_comment([?\r | _] = rest), do: rest
  defp skip_comment([]), do: []
  defp skip_comment([_ | rest]), do: skip_comment(rest)

  # ── String literal reader ──────────────────────────────────────────

  defp read_string_literal([], line, _col, _acc),
    do: {:error, "Unterminated string starting at line #{line}"}

  defp read_string_literal([?" | rest], line, col, acc),
    do: {:ok, IO.chardata_to_string(Enum.reverse(acc)), rest, line, col + 1}

  defp read_string_literal([?\\, ?" | rest], line, col, acc),
    do: read_string_literal(rest, line, col + 2, [?" | acc])

  defp read_string_literal([?\\, ?\\ | rest], line, col, acc),
    do: read_string_literal(rest, line, col + 2, [?\\ | acc])

  defp read_string_literal([?\\, ?n | rest], line, col, acc),
    do: read_string_literal(rest, line, col + 2, [?\n | acc])

  defp read_string_literal([?\\, ?t | rest], line, col, acc),
    do: read_string_literal(rest, line, col + 2, [?\t | acc])

  defp read_string_literal([?\\, ?r | rest], line, col, acc),
    do: read_string_literal(rest, line, col + 2, [?\r | acc])

  defp read_string_literal([?\n | rest], line, _col, acc),
    do: read_string_literal(rest, line + 1, 1, [?\n | acc])

  defp read_string_literal([c | rest], line, col, acc),
    do: read_string_literal(rest, line, col + 1, [c | acc])

  # ── Keyword reader ─────────────────────────────────────────────────

  # Quoted keyword: :"some-name"
  defp read_keyword([?" | rest], line, col) do
    case read_string_literal(rest, line, col + 2, []) do
      {:ok, value, rest2, _end_line, end_col} ->
        {:ok, String.to_atom(value), rest2, end_col}

      {:error, msg} ->
        {:error, msg}
    end
  end

  # Regular keyword: :name, :my-key, :ok
  defp read_keyword(chars, _line, col) do
    {name_chars, rest} = take_keyword_chars(chars, [])

    case name_chars do
      [] ->
        {:error, "Expected keyword name after ':'"}

      _ ->
        name = IO.chardata_to_string(Enum.reverse(name_chars))
        atom_val = String.to_atom(name)
        {:ok, atom_val, rest, col + 1 + length(name_chars)}
    end
  end

  defp take_keyword_chars([c | rest], acc) when c in ?a..?z or c in ?A..?Z or c in ?0..?9 or c in [?_, ?-, ?!, ??, ?., ?/, ?*, ?+, ?>, ?<, ?=, ?&, ?#],
    do: take_keyword_chars(rest, [c | acc])

  defp take_keyword_chars(rest, acc), do: {acc, rest}
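
  # A small sketch of the two keyword forms, assuming the form starts at
  # line 1, column 1 (values worked out from the clauses above):
  #
  #   :ok        => %Token{type: :keyword, value: :ok, line: 1, col: 1}
  #   :"my key"  => %Token{type: :keyword, value: :"my key", line: 1, col: 1}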

  # ── Number reader ──────────────────────────────────────────────────

  defp read_number(chars, line, col, prefix) do
    {digit_chars, rest} = take_digits(chars, prefix)

    case rest do
      [?. | after_dot] ->
        case after_dot do
          [d | _] when d in ?0..?9 ->
            {frac_chars, rest2} = take_digits(after_dot, [?. | digit_chars])
            str = IO.chardata_to_string(Enum.reverse(frac_chars))
            {float_val, ""} = Float.parse(str)
            end_col = col + String.length(str) - length(prefix)
            token = %Token{type: :float, value: float_val, line: line, col: col}
            {:ok, token, rest2, end_col}

          _ ->
            # dot not followed by digit — just an integer, leave dot for next token
            str = IO.chardata_to_string(Enum.reverse(digit_chars))
            {int_val, ""} = Integer.parse(str)
            end_col = col + String.length(str) - length(prefix)
            token = %Token{type: :integer, value: int_val, line: line, col: col}
            {:ok, token, rest, end_col}
        end

      _ ->
        str = IO.chardata_to_string(Enum.reverse(digit_chars))
        {int_val, ""} = Integer.parse(str)
        end_col = col + String.length(str) - length(prefix)
        token = %Token{type: :integer, value: int_val, line: line, col: col}
        {:ok, token, rest, end_col}
    end
  end
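
  # Worked examples for read_number/4, derived from the branches above:
  #
  #   "1.5"  => one :float token with value 1.5
  #   "1.x"  => an :integer token with value 1; the "." stays in the input and
  #             is picked up by the symbol clause of tokenize_loop, so ".x"
  #             becomes a :symbol token
  #   "-42"  => called with prefix [?-] from the `-` clause, yielding :integer -42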

  defp take_digits([c | rest], acc) when c in ?0..?9,
    do: take_digits(rest, [c | acc])

  defp take_digits(rest, acc), do: {acc, rest}

  # ── Symbol reader ──────────────────────────────────────────────────

  defp read_symbol(chars, line, col) do
    {sym_chars, rest} = take_symbol_chars(chars, [])
    name = IO.chardata_to_string(Enum.reverse(sym_chars))
    end_col = col + String.length(name)

    token =
      case name do
        "true" -> %Token{type: :boolean, value: true, line: line, col: col}
        "false" -> %Token{type: :boolean, value: false, line: line, col: col}
        "nil" -> %Token{type: :nil, value: nil, line: line, col: col}
        _ -> %Token{type: :symbol, value: name, line: line, col: col}
      end

    {:ok, token, rest, end_col}
  end
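
  # Example (values worked out from read_symbol/3 above), assuming the symbol
  # starts at line 1, column 5 and is followed by a closing paren:
  #
  #   read_symbol(~c"nil)", 1, 5)
  #   # => {:ok, %Token{type: :nil, value: nil, line: 1, col: 5}, ~c")", 8}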

  defp take_symbol_chars([c | rest], acc) do
    if (acc == [] && symbol_start_char?(c)) || (acc != [] && symbol_continue_char?(c)) do
      take_symbol_chars(rest, [c | acc])
    else
      {acc, [c | rest]}
    end
  end

  defp take_symbol_chars([], acc), do: {acc, []}

  defp symbol_start_char?(c) when c in ?a..?z, do: true
  defp symbol_start_char?(c) when c in ?A..?Z, do: true
  defp symbol_start_char?(c) when c in [?_, ?*, ?!, ??, ?<, ?>, ?=, ?+, ?-, ?., ?&, ?%], do: true
  defp symbol_start_char?(_), do: false

  # ════════════════════════════════════════════════════════════════════
  # PARSER — Recursive Descent
  # ════════════════════════════════════════════════════════════════════

  # Parse all top-level forms until tokens are exhausted
  defp parse_all([], acc), do: {:ok, Enum.reverse(acc)}

  defp parse_all(tokens, acc) do
    case parse_form(tokens) do
      {:ok, form, rest} ->
        parse_all(rest, [form | acc])

      {:error, _} = err ->
        err
    end
  end

  # ── Parse a single form ────────────────────────────────────────────

  # Literals
  defp parse_form([%Token{type: :integer, value: v} | rest]),
    do: {:ok, v, rest}

  defp parse_form([%Token{type: :float, value: v} | rest]),
    do: {:ok, v, rest}

  defp parse_form([%Token{type: :string, value: v} | rest]),
    do: {:ok, v, rest}

  defp parse_form([%Token{type: :keyword, value: v} | rest]),
    do: {:ok, v, rest}

  defp parse_form([%Token{type: :boolean, value: v} | rest]),
    do: {:ok, v, rest}

  defp parse_form([%Token{type: :nil} | rest]),
    do: {:ok, nil, rest}

  # Symbol
  defp parse_form([%Token{type: :symbol, value: name, line: l, col: c} | rest]),
    do: {:ok, {:symbol, %{line: l, col: c}, name}, rest}

  # List ( ... )
  defp parse_form([%Token{type: :lparen, line: l, col: c} | rest]) do
    case parse_until(rest, :rparen) do
      {:ok, elements, rest2} ->
        {:ok, {:list, %{line: l, col: c}, elements}, rest2}

      {:error, _} = err ->
        err
    end
  end
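
  # A sketch of the parser output for "(+ 1 2)", assuming the tokenizer numbers
  # lines and columns from 1 (so "(" is at col 1, "+" at col 2, and so on):
  #
  #   {:list, %{line: 1, col: 1},
  #    [{:symbol, %{line: 1, col: 2}, "+"}, 1, 2]}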

  # Vector [ ... ]
  defp parse_form([%Token{type: :lbracket, line: l, col: c} | rest]) do
    case parse_until(rest, :rbracket) do
      {:ok, elements, rest2} ->
        {:ok, {:vector, %{line: l, col: c}, elements}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # Map { ... }
  defp parse_form([%Token{type: :lbrace, line: l, col: c} | rest]) do
    case parse_until(rest, :rbrace) do
      {:ok, elements, rest2} ->
        {:ok, {:map, %{line: l, col: c}, elements}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # Set #{ ... }
  defp parse_form([%Token{type: :hash_lbrace, line: l, col: c} | rest]) do
    case parse_until(rest, :rbrace) do
      {:ok, elements, rest2} ->
        {:ok, {:set, %{line: l, col: c}, elements}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # BEAM tuple #el[ ... ]
  defp parse_form([%Token{type: :hash_el_lbracket, line: l, col: c} | rest]) do
    case parse_until(rest, :rbracket) do
      {:ok, elements, rest2} ->
        {:ok, {:tuple, %{line: l, col: c}, elements}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # Anonymous function #( ... )
  defp parse_form([%Token{type: :hash_lparen, line: l, col: c} | rest]) do
    case parse_until(rest, :rparen) do
      {:ok, elements, rest2} ->
        body = {:list, %{line: l, col: c}, elements}
        {:ok, {:anon_fn, %{line: l, col: c}, body}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # Regex #"..."
  defp parse_form([%Token{type: :hash_string, value: pattern, line: l, col: c} | rest]),
    do: {:ok, {:regex, %{line: l, col: c}, pattern}, rest}

  # Quote '
  defp parse_form([%Token{type: :quote, line: l, col: c} | rest]) do
    case parse_form(rest) do
      {:ok, form, rest2} ->
        {:ok, {:quote, %{line: l, col: c}, form}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # Quasiquote `
  defp parse_form([%Token{type: :quasiquote, line: l, col: c} | rest]) do
    case parse_form(rest) do
      {:ok, form, rest2} ->
        {:ok, {:quasiquote, %{line: l, col: c}, form}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # Unquote ~
  defp parse_form([%Token{type: :unquote, line: l, col: c} | rest]) do
    case parse_form(rest) do
      {:ok, form, rest2} ->
        {:ok, {:unquote, %{line: l, col: c}, form}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # Splice-unquote ~@
  defp parse_form([%Token{type: :splice_unquote, line: l, col: c} | rest]) do
    case parse_form(rest) do
      {:ok, form, rest2} ->
        {:ok, {:splice_unquote, %{line: l, col: c}, form}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # Deref @
  defp parse_form([%Token{type: :deref, line: l, col: c} | rest]) do
    case parse_form(rest) do
      {:ok, form, rest2} ->
        {:ok, {:deref, %{line: l, col: c}, form}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # Metadata ^
  defp parse_form([%Token{type: :meta, line: l, col: c} | rest]) do
    case parse_meta_value(rest, l, c) do
      {:ok, meta_form, rest2} ->
        case parse_form(rest2) do
          {:ok, target, rest3} ->
            {:ok, {:with_meta, %{line: l, col: c}, {meta_form, target}}, rest3}

          {:error, _} = err ->
            err
        end

      {:error, _} = err ->
        err
    end
  end
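
  # A sketch for "^:private foo" (keyword metadata sugar, handled by the
  # parse_meta_value clauses below), assuming columns start at 1:
  #
  #   {:with_meta, %{line: 1, col: 1},
  #    {{:map, %{line: 1, col: 2}, [:private, true]},
  #     {:symbol, %{line: 1, col: 11}, "foo"}}}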

  # Unexpected token
  defp parse_form([%Token{type: type, line: l, col: c} | _]),
    do: {:error, "Unexpected token #{type} at line #{l}, col #{c}"}

  defp parse_form([]),
    do: {:error, "Unexpected end of input"}

  # ── Parse helpers ──────────────────────────────────────────────────

  # Parse elements until a closing delimiter token type is found
  defp parse_until(tokens, closer) do
    parse_until_loop(tokens, closer, [])
  end

  defp parse_until_loop([], closer, _acc) do
    name = delimiter_name(closer)
    {:error, "Unexpected end of input, expected '#{name}'"}
  end

  defp parse_until_loop([%Token{type: type} | rest], closer, acc) when type == closer do
    {:ok, Enum.reverse(acc), rest}
  end

  defp parse_until_loop(tokens, closer, acc) do
    case parse_form(tokens) do
      {:ok, form, rest} ->
        parse_until_loop(rest, closer, [form | acc])

      {:error, _} = err ->
        err
    end
  end
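
  # For example, an unterminated list such as "(1 2" runs parse_until_loop/3 off
  # the end of the token stream and yields:
  #
  #   {:error, "Unexpected end of input, expected ')'"}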

  # Parse the value after ^ (metadata)
  # ^{...} — map metadata
  defp parse_meta_value([%Token{type: :lbrace, line: l, col: c} | rest], _ml, _mc) do
    case parse_until(rest, :rbrace) do
      {:ok, elements, rest2} ->
        {:ok, {:map, %{line: l, col: c}, elements}, rest2}

      {:error, _} = err ->
        err
    end
  end

  # ^:keyword — sugar for ^{:keyword true}
  defp parse_meta_value([%Token{type: :keyword, value: kw, line: l, col: c} | rest], _ml, _mc) do
    meta_map = {:map, %{line: l, col: c}, [kw, true]}
    {:ok, meta_map, rest}
  end

  # ^symbol — sugar for ^{:tag symbol}
  defp parse_meta_value([%Token{type: :symbol} | _] = tokens, _ml, _mc) do
    case parse_form(tokens) do
      {:ok, form, rest} -> {:ok, form, rest}
      {:error, _} = err -> err
    end
  end

  defp parse_meta_value(_tokens, ml, mc) do
    {:error, "Expected metadata value (map, keyword, or symbol) at line #{ml}, col #{mc}"}
  end

  defp delimiter_name(:rparen), do: ")"
  defp delimiter_name(:rbracket), do: "]"
  defp delimiter_name(:rbrace), do: "}"
end
@@ -0,0 +1,14 @@
defmodule CljElixir.Reader.Token do
  @moduledoc """
  A token produced by the CljElixir tokenizer.

  Types:
    :integer, :float, :string, :keyword, :symbol, :boolean, :nil,
    :lparen, :rparen, :lbracket, :rbracket, :lbrace, :rbrace,
    :hash_lbrace, :hash_el_lbracket, :hash_lparen, :hash_string,
    :quote, :quasiquote, :unquote, :splice_unquote,
    :meta, :deref
  """
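
  # Example of a fully populated token, e.g. for the input `foo` read at
  # line 1, column 3 (a sketch; the tokenizer decides how each field is set):
  #
  #   %CljElixir.Reader.Token{type: :symbol, value: "foo", line: 1, col: 3}
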
defstruct [:type, :value, :line, :col]
end
File diff suppressed because it is too large
@@ -0,0 +1,336 @@
defmodule Mix.Tasks.Compile.CljElixir do
  @moduledoc """
  Mix compiler plugin for CljElixir `.clje` files.

  Integrates `.clje` source files into the standard Mix build pipeline.
  Supports incremental compilation via a manifest that tracks source file
  modification times and the modules they produce.

  ## Configuration

  In your `mix.exs`, add `:clj_elixir` to your compilers and configure
  source paths:

      def project do
        [
          compilers: [:clj_elixir] ++ Mix.compilers(),
          clj_elixir_paths: ["src"]
        ]
      end

  ## How It Works

  1. Scans configured source paths for `.clje` files
  2. Checks the manifest for previously compiled files and their mtimes
  3. Compiles only stale files (new or modified since last build)
  4. Writes `.beam` files to the build output directory
  5. Updates the manifest with new module info
  6. Returns `{:ok, diagnostics}` or `{:error, diagnostics}`

  ## Manifest

  The manifest is stored at `_build/ENV/.clj_elixir_manifest` and tracks
  `{source_path, mtime, [module_names]}` tuples for incremental compilation.
  """
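
  # An example manifest entry matching the shape documented above (the path,
  # mtime, and module name are made up for illustration):
  #
  #   {"src/my_app/core.clje", 1_712_345_678, [MyApp.Core]}
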
  use Mix.Task.Compiler

  @manifest_filename ".clj_elixir_manifest"
  @recursive true

  @impl true
  def run(argv) do
    {opts, _, _} =
      OptionParser.parse(argv,
        switches: [force: :boolean, verbose: :boolean],
        aliases: [f: :force, v: :verbose]
      )

    force? = opts[:force] || false
    verbose? = opts[:verbose] || false

    project = Mix.Project.config()
    source_paths = project[:clj_elixir_paths] || ["src"]
    build_path = Mix.Project.compile_path(project)

    # Ensure build directory exists
    File.mkdir_p!(build_path)

    # Find all .clje source files
    sources = find_sources(source_paths)

    if sources == [] do
      {:noop, []}
    else
      manifest_path = manifest_path()
      manifest = load_manifest(manifest_path)

      # Determine which files need recompilation
      {stale, removed} = partition_sources(sources, manifest, force?)

      if stale == [] and removed == [] do
        if verbose?, do: Mix.shell().info("All .clje files are up to date")
        {:noop, []}
      else
        # Clean up modules from removed source files
        removed_diagnostics = clean_removed(removed, manifest, build_path, verbose?)

        # Compile stale files
        {compiled, diagnostics} = compile_stale(stale, build_path, verbose?)

        # Build new manifest from existing (unchanged) + newly compiled
        unchanged_entries =
          manifest
          |> Enum.reject(fn {path, _mtime, _modules} ->
            path in stale or path in removed
          end)

        new_manifest = unchanged_entries ++ compiled
        save_manifest(manifest_path, new_manifest)

        all_diagnostics = removed_diagnostics ++ diagnostics

        has_errors? = Enum.any?(all_diagnostics, &(&1.severity == :error))

        if has_errors? do
          {:error, to_mix_diagnostics(all_diagnostics)}
        else
          {:ok, to_mix_diagnostics(all_diagnostics)}
        end
      end
    end
  end

  @impl true
  def manifests do
    [manifest_path()]
  end

  @impl true
  def clean do
    manifest_path = manifest_path()
    manifest = load_manifest(manifest_path)
    build_path = Mix.Project.compile_path()

    Enum.each(manifest, fn {_path, _mtime, modules} ->
      Enum.each(modules, fn module ->
        beam_file = Path.join(build_path, "#{module}.beam")
        File.rm(beam_file)
      end)
    end)

    File.rm(manifest_path)
    :ok
  end

  # ---------------------------------------------------------------------------
  # Source discovery
  # ---------------------------------------------------------------------------

  defp find_sources(paths) do
    paths
    |> Enum.flat_map(fn path ->
      path
      |> Path.join("**/*.clje")
      |> Path.wildcard()
    end)
    |> Enum.sort_by(fn path ->
      parts = Path.split(path)
      basename = Path.basename(path, ".clje")
      # Protocols must compile first (priority 0), then everything else (priority 1)
      priority = if basename == "protocols", do: 0, else: 1
      {-length(parts), priority, path}
    end)
  end

  # ---------------------------------------------------------------------------
  # Staleness detection
  # ---------------------------------------------------------------------------

  defp partition_sources(sources, manifest, force?) do
    manifest_map = Map.new(manifest, fn {path, mtime, modules} -> {path, {mtime, modules}} end)

    stale =
      if force? do
        sources
      else
        Enum.filter(sources, fn source ->
          case Map.get(manifest_map, source) do
            nil ->
              true

            {old_mtime, _modules} ->
              current_mtime = file_mtime(source)
              current_mtime > old_mtime
          end
        end)
      end

    source_set = MapSet.new(sources)

    removed =
      manifest
      |> Enum.map(fn {path, _mtime, _modules} -> path end)
      |> Enum.reject(&MapSet.member?(source_set, &1))

    {stale, removed}
  end
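
  # A sketch of partition_sources/3 on hypothetical inputs (paths invented for
  # illustration): with sources ["src/a.clje", "src/new.clje"] and a manifest
  # [{"src/a.clje", 100, [A]}, {"src/old.clje", 90, [Old]}], and assuming
  # src/a.clje's mtime is still 100, the result is:
  #
  #   {["src/new.clje"], ["src/old.clje"]}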

  # ---------------------------------------------------------------------------
  # Compilation
  # ---------------------------------------------------------------------------

  defp compile_stale(sources, build_path, verbose?) do
    results =
      Enum.map(sources, fn source ->
        if verbose?, do: Mix.shell().info("Compiling #{source}")
        compile_source(source, build_path)
      end)

    {compiled, diagnostics} =
      Enum.reduce(results, {[], []}, fn
        {:ok, entry, diags}, {compiled, all_diags} ->
          {[entry | compiled], all_diags ++ diags}

        {:error, diags}, {compiled, all_diags} ->
          {compiled, all_diags ++ diags}
      end)

    {Enum.reverse(compiled), diagnostics}
  end

  defp compile_source(source, build_path) do
    case CljElixir.Compiler.compile_file_to_beam(source,
           output_dir: build_path,
           vector_as_list: true
         ) do
      {:ok, modules} ->
        mtime = file_mtime(source)
        module_names = Enum.map(modules, fn {mod, _binary} -> mod end)
        entry = {source, mtime, module_names}

        diagnostics =
          if module_names == [] do
            [
              %{
                severity: :warning,
                message: "#{source} produced no modules",
                file: source,
                line: 0,
                col: 0
              }
            ]
          else
            []
          end

        {:ok, entry, diagnostics}

      {:error, diagnostics} ->
        enriched =
          Enum.map(diagnostics, fn diag ->
            Map.put_new(diag, :file, source)
          end)

        {:error, enriched}
    end
  end

  # ---------------------------------------------------------------------------
  # Cleanup
  # ---------------------------------------------------------------------------

  defp clean_removed(removed, manifest, build_path, verbose?) do
    manifest_map = Map.new(manifest, fn {path, mtime, modules} -> {path, {mtime, modules}} end)

    Enum.flat_map(removed, fn path ->
      case Map.get(manifest_map, path) do
        nil ->
          []

        {_mtime, modules} ->
          if verbose?, do: Mix.shell().info("Cleaning removed source #{path}")

          Enum.each(modules, fn module ->
            beam_file = Path.join(build_path, "#{module}.beam")
            File.rm(beam_file)

            # Purge the module from the code server
            :code.purge(module)
            :code.delete(module)
          end)

          []
      end
    end)
  end

  # ---------------------------------------------------------------------------
  # Manifest I/O
  # ---------------------------------------------------------------------------

  defp manifest_path do
    Path.join(Mix.Project.manifest_path(), @manifest_filename)
  end

  defp load_manifest(path) do
    case File.read(path) do
      {:ok, contents} ->
        try do
          contents
          |> :erlang.binary_to_term()
          |> validate_manifest()
        rescue
          _ -> []
        end

      {:error, _} ->
        []
    end
  end

  defp validate_manifest(data) when is_list(data) do
    Enum.filter(data, fn
      {path, mtime, modules}
      when is_binary(path) and is_integer(mtime) and is_list(modules) ->
        true

      _ ->
        false
    end)
  end

  defp validate_manifest(_), do: []

  defp save_manifest(path, entries) do
    File.mkdir_p!(Path.dirname(path))
    File.write!(path, :erlang.term_to_binary(entries))
  end

  # ---------------------------------------------------------------------------
  # Diagnostics
  # ---------------------------------------------------------------------------

  defp to_mix_diagnostics(diagnostics) do
    Enum.map(diagnostics, fn diag ->
      %Mix.Task.Compiler.Diagnostic{
        file: Map.get(diag, :file, "unknown"),
        severity: diag.severity,
        message: diag.message,
        position: diag.line,
        compiler_name: "clj_elixir"
      }
    end)
  end

  # ---------------------------------------------------------------------------
  # File utilities
  # ---------------------------------------------------------------------------

  defp file_mtime(path) do
    case File.stat(path, time: :posix) do
      {:ok, %{mtime: mtime}} -> mtime
      {:error, _} -> 0
    end
  end
end