Phases 1-7: Complete CljElixir compiler through Malli schema adapter

Bootstrap compiler (reader, analyzer, transformer, compiler, Mix plugin),
core protocols (16 protocols for Map/List/Tuple/BitString), PersistentVector
(bit-partitioned trie), domain tools (clojurify/elixirify), BEAM concurrency
(receive, spawn, GenServer), control flow & macros (threading, try/catch,
destructuring, defmacro with quasiquote/auto-gensym), and Malli schema
adapter (m/=> specs, auto @type, recursive schemas, cross-references).

537 compiler tests + 55 Malli unit tests + 15 integration tests = 607 total.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-08 10:38:22 -04:00
commit d8719b6d48
26 changed files with 11487 additions and 0 deletions
+3
View File
@@ -0,0 +1,3 @@
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]
+10
View File
@@ -0,0 +1,10 @@
/_build/
/cover/
/deps/
/doc/
/.fetch
erl_crash.dump
*.ez
*.beam
/tmp/
.elixir_ls/
+71
View File
@@ -0,0 +1,71 @@
defmodule CljElixir do
  @moduledoc "CljElixir: Clojure-syntax language for the BEAM"

  # This module is a thin facade: every public entry point forwards to
  # `CljElixir.Compiler` with identical arguments, so `defdelegate` is used
  # instead of hand-written wrapper bodies.

  @doc """
  Compile a CljElixir source string to Elixir AST.

  Returns `{:ok, elixir_ast}` on success, or `{:error, diagnostics}` on failure.

  ## Options

    * `:file` - the file path for error reporting (default: `"nofile"`)

  ## Examples

      iex> CljElixir.compile_string("(+ 1 2)")
      {:ok, {:+, [line: 1], [1, 2]}}
  """
  @spec compile_string(String.t(), keyword()) :: {:ok, term()} | {:error, list()}
  defdelegate compile_string(source, opts \\ []), to: CljElixir.Compiler

  @doc """
  Compile a `.clje` file to Elixir AST.

  Reads the file and delegates to `compile_string/2` with the `:file` option set.
  Returns `{:ok, elixir_ast}` on success, or `{:error, diagnostics}` on failure.
  """
  @spec compile_file(Path.t(), keyword()) :: {:ok, term()} | {:error, list()}
  defdelegate compile_file(path, opts \\ []), to: CljElixir.Compiler

  @doc """
  Compile and evaluate a CljElixir source string.

  Returns `{:ok, result, bindings}` on success, or `{:error, diagnostics}` on failure.

  ## Options

    * `:file` - the file path for error reporting (default: `"nofile"`)
    * `:bindings` - variable bindings for evaluation (default: `[]`)
    * `:env` - the macro environment for evaluation (default: `__ENV__`)

  ## Examples

      iex> CljElixir.eval_string("(+ 1 2)")
      {:ok, 3, []}
  """
  @spec eval_string(String.t(), keyword()) :: {:ok, term(), keyword()} | {:error, list()}
  defdelegate eval_string(source, opts \\ []), to: CljElixir.Compiler

  @doc """
  Compile and evaluate a `.clje` file.

  Reads the file and delegates to `eval_string/2` with the `:file` option set.
  Returns `{:ok, result, bindings}` on success, or `{:error, diagnostics}` on failure.
  """
  @spec eval_file(Path.t(), keyword()) :: {:ok, term(), keyword()} | {:error, list()}
  defdelegate eval_file(path, opts \\ []), to: CljElixir.Compiler
end
+626
View File
@@ -0,0 +1,626 @@
defmodule CljElixir.Analyzer do
  @moduledoc """
  AST analyzer and validator for CljElixir.

  Performs lightweight static analysis on CljElixir AST forms (output of the Reader)
  before they are passed to the Transformer. Catches common structural errors early
  with clear diagnostic messages.

  ## Validations

  1. **Special form arity** - `defmodule` needs name + body, `let` needs a vector
     with an even number of binding pairs, `if` needs 2-3 args, `case` needs a
     subject + even pattern/body pairs, `cond` needs even pairs, `loop` needs a
     vector with even binding pairs.
  2. **Map literal validation** - Maps must have an even number of forms (key-value pairs).
  3. **`recur` position** - `recur` must appear in tail position. In `if`/`case`/`cond`,
     the tail position is the last expression of each branch. In `let`/`do`, the tail
     position is the last expression.
  4. **Nested `recur`** - `recur` inside a nested `loop` should only refer to the
     innermost loop, not an outer one.

  ## Return Value

  Returns `{:ok, forms}` when the AST is valid (passes forms through unchanged),
  or `{:error, diagnostics}` when errors are found.
  Diagnostics are maps with keys: `:severity`, `:message`, `:line`, `:col`.
  """

  # A single diagnostic produced by the analyzer.
  @type diagnostic :: %{
          severity: :error | :warning,
          message: String.t(),
          line: non_neg_integer(),
          col: non_neg_integer()
        }

  @doc """
  Analyze and validate a list of CljElixir AST forms.

  Returns `{:ok, forms}` if all validations pass, or `{:error, diagnostics}`
  with a list of diagnostic maps describing the errors found.
  """
  @spec analyze(list()) :: {:ok, list()} | {:error, [diagnostic()]}
  def analyze(forms) when is_list(forms) do
    # Each top-level form starts in tail position with no enclosing loop/fn.
    diagnostics =
      forms
      |> Enum.flat_map(fn form -> validate_form(form, %{tail: true, in_loop: false, in_fn: false}) end)
    # Succeed only when no :error-severity diagnostic was produced.
    # NOTE(review): warnings are discarded on the success path — they are only
    # surfaced when mixed with at least one error. Confirm this is intentional.
    case Enum.filter(diagnostics, &(&1.severity == :error)) do
      [] -> {:ok, forms}
      _errors -> {:error, diagnostics}
    end
  end
  # Convenience clause: wrap a single (tuple) form into a list and re-dispatch.
  def analyze(form) do
    analyze(List.wrap(form))
  end

  # ---------------------------------------------------------------------------
  # Form validation - dispatches on the head of each s-expression
  # ---------------------------------------------------------------------------
  # A list form starting with an atom is an s-expression: (special-form ...)
  # ctx is %{tail: boolean, in_loop: boolean, in_fn: boolean}; only recur
  # validation consumes it, but every validator threads it through.
  defp validate_form({:list, meta, [{:symbol, _, "defmodule"} | args]}, ctx) do
    validate_defmodule(args, meta, ctx)
  end
  defp validate_form({:list, meta, [{:symbol, _, "defn"} | args]}, ctx) do
    validate_defn(args, meta, ctx)
  end
  # defn- (private defn) shares the same structural rules as defn.
  defp validate_form({:list, meta, [{:symbol, _, "defn-"} | args]}, ctx) do
    validate_defn(args, meta, ctx)
  end
  defp validate_form({:list, meta, [{:symbol, _, "fn"} | args]}, ctx) do
    validate_fn(args, meta, ctx)
  end
  defp validate_form({:list, meta, [{:symbol, _, "let"} | args]}, ctx) do
    validate_let(args, meta, ctx)
  end
  defp validate_form({:list, meta, [{:symbol, _, "if"} | args]}, ctx) do
    validate_if(args, meta, ctx)
  end
  defp validate_form({:list, meta, [{:symbol, _, "case"} | args]}, ctx) do
    validate_case(args, meta, ctx)
  end
  defp validate_form({:list, meta, [{:symbol, _, "cond"} | args]}, ctx) do
    validate_cond(args, meta, ctx)
  end
  defp validate_form({:list, meta, [{:symbol, _, "loop"} | args]}, ctx) do
    validate_loop(args, meta, ctx)
  end
  # recur's own arguments are not validated here; only its position is checked.
  defp validate_form({:list, meta, [{:symbol, _, "recur"} | _args]}, ctx) do
    validate_recur(meta, ctx)
  end
  defp validate_form({:list, meta, [{:symbol, _, "do"} | args]}, ctx) do
    validate_do(args, meta, ctx)
  end
  defp validate_form({:map, meta, elements}, ctx) do
    validate_map_literal(elements, meta, ctx)
  end
  # Generic list form: validate children
  defp validate_form({:list, _meta, children}, ctx) when is_list(children) do
    # In a function call, only the last argument is not necessarily in tail position,
    # but for recur analysis, none of the arguments to a call are in tail position
    # (since the call itself might be, but its args are not).
    non_tail_ctx = %{ctx | tail: false}
    Enum.flat_map(children, fn child ->
      validate_form(child, non_tail_ctx)
    end)
  end
  # Vectors: validate elements
  defp validate_form({:vector, _meta, elements}, ctx) when is_list(elements) do
    non_tail_ctx = %{ctx | tail: false}
    Enum.flat_map(elements, fn el -> validate_form(el, non_tail_ctx) end)
  end
  # Sets: validate elements
  defp validate_form({:set, _meta, elements}, ctx) when is_list(elements) do
    non_tail_ctx = %{ctx | tail: false}
    Enum.flat_map(elements, fn el -> validate_form(el, non_tail_ctx) end)
  end
  # Atoms, numbers, strings, symbols, keywords — always valid
  defp validate_form(_leaf, _ctx), do: []

  # ---------------------------------------------------------------------------
  # Special form validators
  # ---------------------------------------------------------------------------
  # (defmodule Name body...) — requires a name and at least one body form.
  defp validate_defmodule(args, meta, ctx) do
    line = meta_line(meta)
    col = meta_col(meta)
    case args do
      [] ->
        [
          %{
            severity: :error,
            message: "defmodule requires a module name and at least one body expression",
            line: line,
            col: col
          }
        ]
      [_name] ->
        [
          %{
            severity: :error,
            message: "defmodule requires at least one body expression after the module name",
            line: line,
            col: col
          }
        ]
      [_name | body] ->
        # Body forms are each in tail position within the module (top-level forms)
        Enum.flat_map(body, fn form ->
          validate_form(form, %{ctx | tail: true, in_loop: false, in_fn: false})
        end)
    end
  end

  # Handles both defn and defn-; docstring, single-arity, and multi-arity shapes.
  defp validate_defn(args, meta, ctx) do
    line = meta_line(meta)
    col = meta_col(meta)
    case args do
      [] ->
        [
          %{
            severity: :error,
            message: "defn requires a function name, parameter vector, and body",
            line: line,
            col: col
          }
        ]
      [_name] ->
        [
          %{
            severity: :error,
            message: "defn requires a parameter vector and body after the function name",
            line: line,
            col: col
          }
        ]
      [_name, maybe_doc | rest] ->
        # Could be: (defn name [params] body...)
        # or: (defn name "docstring" [params] body...)
        # or: (defn name ([params1] body1) ([params2] body2)) -- multi-arity
        # Entering a fn body resets the loop flag: recur targets the fn itself.
        fn_ctx = %{ctx | tail: true, in_fn: true, in_loop: false}
        case maybe_doc do
          # Multi-arity: (defn name (clause1) (clause2) ...)
          {:list, _, _} ->
            clauses = [maybe_doc | rest]
            Enum.flat_map(clauses, fn clause -> validate_fn_clause(clause, fn_ctx) end)
          # Docstring form: (defn name "doc" ...)
          {:string, _, _} ->
            validate_defn_body(rest, fn_ctx, line, col)
          # Single arity with param vector: (defn name [params] body...)
          {:vector, _, _} ->
            validate_fn_body(rest, fn_ctx)
          _ ->
            validate_fn_body(rest, fn_ctx)
        end
    end
  end

  # Validates what follows a (defn name "docstring" ...) docstring.
  defp validate_defn_body(rest, ctx, line, col) do
    case rest do
      [] ->
        [
          %{
            severity: :error,
            message: "defn requires a parameter vector and body after docstring",
            line: line,
            col: col
          }
        ]
      [{:vector, _, _} | body] ->
        validate_fn_body(body, ctx)
      [{:list, _, _} | _] = clauses ->
        # Multi-arity after docstring
        Enum.flat_map(clauses, fn clause -> validate_fn_clause(clause, ctx) end)
      _ ->
        []
    end
  end

  # (fn ...) — anonymous fn; supports optional name and multi-arity clauses.
  defp validate_fn(args, meta, ctx) do
    line = meta_line(meta)
    col = meta_col(meta)
    fn_ctx = %{ctx | tail: true, in_fn: true, in_loop: false}
    case args do
      [] ->
        [
          %{
            severity: :error,
            message: "fn requires a parameter vector and body",
            line: line,
            col: col
          }
        ]
      # (fn [params] body...) - single arity
      [{:vector, _, _} | body] ->
        validate_fn_body(body, fn_ctx)
      # (fn name [params] body...) - named fn
      [{:symbol, _, _}, {:vector, _, _} | body] ->
        validate_fn_body(body, fn_ctx)
      # (fn (clause1) (clause2) ...) - multi-arity
      [{:list, _, _} | _] = clauses ->
        Enum.flat_map(clauses, fn clause -> validate_fn_clause(clause, fn_ctx) end)
      # (fn name (clause1) (clause2) ...) - named multi-arity
      [{:symbol, _, _} | [{:list, _, _} | _] = clauses] ->
        Enum.flat_map(clauses, fn clause -> validate_fn_clause(clause, fn_ctx) end)
      _ ->
        []
    end
  end

  # A single multi-arity clause: ([params] body...). Anything else is ignored;
  # malformed clauses are presumably caught later in the pipeline.
  defp validate_fn_clause({:list, _meta, [{:vector, _, _} | body]}, ctx) do
    validate_fn_body(body, ctx)
  end
  defp validate_fn_clause(_other, _ctx), do: []

  # Only the last expression of a fn body stays in tail position.
  defp validate_fn_body([], _ctx), do: []
  defp validate_fn_body(body, ctx) do
    {leading, [last]} = Enum.split(body, -1)
    non_tail = %{ctx | tail: false}
    leading_diags = Enum.flat_map(leading, fn form -> validate_form(form, non_tail) end)
    last_diags = validate_form(last, ctx)
    leading_diags ++ last_diags
  end

  # (let [bindings...] body...) — even binding pairs; warns on empty body.
  defp validate_let(args, meta, ctx) do
    line = meta_line(meta)
    col = meta_col(meta)
    case args do
      [] ->
        [
          %{
            severity: :error,
            message: "let requires a binding vector and body",
            line: line,
            col: col
          }
        ]
      [{:vector, vmeta, bindings} | body] ->
        binding_diags = validate_binding_vector(bindings, vmeta, "let")
        body_diags =
          case body do
            [] ->
              [
                %{
                  severity: :warning,
                  message: "let with no body expression always returns nil",
                  line: line,
                  col: col
                }
              ]
            _ ->
              validate_body_forms(body, ctx)
          end
        binding_diags ++ body_diags
      _ ->
        [
          %{
            severity: :error,
            message: "let requires a binding vector as its first argument",
            line: line,
            col: col
          }
        ]
    end
  end

  # (if cond then [else]) — 2 or 3 args; both branches inherit tail position.
  defp validate_if(args, meta, ctx) do
    line = meta_line(meta)
    col = meta_col(meta)
    case length(args) do
      n when n < 2 ->
        [
          %{
            severity: :error,
            message: "if requires a condition and at least a then branch (got #{n} argument(s))",
            line: line,
            col: col
          }
        ]
      n when n > 3 ->
        [
          %{
            severity: :error,
            message: "if accepts at most 3 arguments (condition, then, else), got #{n}",
            line: line,
            col: col
          }
        ]
      2 ->
        [condition, then_branch] = args
        non_tail = %{ctx | tail: false}
        validate_form(condition, non_tail) ++
          validate_form(then_branch, ctx)
      3 ->
        [condition, then_branch, else_branch] = args
        non_tail = %{ctx | tail: false}
        validate_form(condition, non_tail) ++
          validate_form(then_branch, ctx) ++
          validate_form(else_branch, ctx)
    end
  end

  # (case subject pat1 body1 pat2 body2 ...) — subject is non-tail, each body
  # inherits tail position. Patterns themselves are not validated here.
  defp validate_case(args, meta, ctx) do
    line = meta_line(meta)
    col = meta_col(meta)
    case args do
      [] ->
        [
          %{
            severity: :error,
            message: "case requires a subject expression and at least one pattern/body pair",
            line: line,
            col: col
          }
        ]
      [_subject] ->
        [
          %{
            severity: :error,
            message: "case requires at least one pattern/body pair after the subject",
            line: line,
            col: col
          }
        ]
      [subject | pairs] ->
        non_tail = %{ctx | tail: false}
        subject_diags = validate_form(subject, non_tail)
        pair_diags =
          if rem(length(pairs), 2) != 0 do
            [
              %{
                severity: :error,
                message:
                  "case requires an even number of pattern/body forms, got #{length(pairs)}",
                line: line,
                col: col
              }
            ]
          else
            pairs
            |> Enum.chunk_every(2)
            |> Enum.flat_map(fn
              [_pattern, body] ->
                validate_form(body, ctx)
              _ ->
                []
            end)
          end
        subject_diags ++ pair_diags
    end
  end

  # (cond test1 expr1 test2 expr2 ...) — even pairs; tests are non-tail,
  # result expressions inherit tail position.
  defp validate_cond(args, meta, ctx) do
    line = meta_line(meta)
    col = meta_col(meta)
    if rem(length(args), 2) != 0 do
      [
        %{
          severity: :error,
          message: "cond requires an even number of test/expression pairs, got #{length(args)}",
          line: line,
          col: col
        }
      ]
    else
      non_tail = %{ctx | tail: false}
      args
      |> Enum.chunk_every(2)
      |> Enum.flat_map(fn
        [test, body] ->
          validate_form(test, non_tail) ++ validate_form(body, ctx)
        _ ->
          []
      end)
    end
  end

  # (loop [bindings...] body...) — like let, but marks in_loop so recur is legal.
  defp validate_loop(args, meta, ctx) do
    line = meta_line(meta)
    col = meta_col(meta)
    case args do
      [] ->
        [
          %{
            severity: :error,
            message: "loop requires a binding vector and body",
            line: line,
            col: col
          }
        ]
      [{:vector, vmeta, bindings} | body] ->
        binding_diags = validate_binding_vector(bindings, vmeta, "loop")
        body_diags =
          case body do
            [] ->
              [
                %{
                  severity: :warning,
                  message: "loop with no body expression always returns nil",
                  line: line,
                  col: col
                }
              ]
            _ ->
              # Inside the loop body, recur becomes legal again (innermost loop).
              loop_ctx = %{ctx | tail: true, in_loop: true}
              validate_body_forms(body, loop_ctx)
          end
        binding_diags ++ body_diags
      _ ->
        [
          %{
            severity: :error,
            message: "loop requires a binding vector as its first argument",
            line: line,
            col: col
          }
        ]
    end
  end

  # recur is valid only in tail position AND inside a loop or fn body.
  defp validate_recur(meta, ctx) do
    line = meta_line(meta)
    col = meta_col(meta)
    cond do
      not ctx.tail ->
        [
          %{
            severity: :error,
            message: "recur must be in tail position",
            line: line,
            col: col
          }
        ]
      not (ctx.in_loop or ctx.in_fn) ->
        [
          %{
            severity: :error,
            message: "recur must be inside a loop or function body",
            line: line,
            col: col
          }
        ]
      true ->
        []
    end
  end

  # (do expr...) — only the last expression keeps tail position.
  defp validate_do(args, _meta, ctx) do
    validate_body_forms(args, ctx)
  end

  # {k v k v ...} — map literals must pair up. Element sub-forms are not
  # recursed into here; they are reached via the generic dispatch clauses.
  defp validate_map_literal(elements, meta, _ctx) do
    if rem(length(elements), 2) != 0 do
      line = meta_line(meta)
      col = meta_col(meta)
      [
        %{
          severity: :error,
          message:
            "map literal requires an even number of forms (key-value pairs), got #{length(elements)}",
          line: line,
          col: col
        }
      ]
    else
      []
    end
  end

  # ---------------------------------------------------------------------------
  # Helpers
  # ---------------------------------------------------------------------------
  # Shared by let and loop: binding vectors must hold name/value pairs.
  defp validate_binding_vector(bindings, meta, form_name) do
    if rem(length(bindings), 2) != 0 do
      line = meta_line(meta)
      col = meta_col(meta)
      [
        %{
          severity: :error,
          message:
            "#{form_name} binding vector requires an even number of forms (name/value pairs), got #{length(bindings)}",
          line: line,
          col: col
        }
      ]
    else
      []
    end
  end

  # Validate a sequence of body forms: all non-tail except the last.
  defp validate_body_forms([], _ctx), do: []
  defp validate_body_forms(forms, ctx) do
    {leading, [last]} = Enum.split(forms, -1)
    non_tail = %{ctx | tail: false}
    leading_diags = Enum.flat_map(leading, fn form -> validate_form(form, non_tail) end)
    last_diags = validate_form(last, ctx)
    leading_diags ++ last_diags
  end

  # Reader metadata may arrive as a map or keyword list; default to 0 when absent.
  defp meta_line(meta) when is_map(meta), do: Map.get(meta, :line, 0)
  defp meta_line(meta) when is_list(meta), do: Keyword.get(meta, :line, 0)
  defp meta_line(_), do: 0
  defp meta_col(meta) when is_map(meta), do: Map.get(meta, :col, 0)
  defp meta_col(meta) when is_list(meta), do: Keyword.get(meta, :col, 0)
  defp meta_col(_), do: 0
end
+312
View File
@@ -0,0 +1,312 @@
defmodule CljElixir.Compiler do
  @moduledoc """
  Orchestrates the CljElixir compilation pipeline.

  Chains: Reader -> Analyzer -> Transformer -> Elixir compilation.

  The pipeline:

  1. **Reader** (`CljElixir.Reader`) - Parses source text into CljElixir AST
     (s-expression forms represented as Elixir terms).
  2. **Analyzer** (`CljElixir.Analyzer`) - Validates the AST, checking special form
     arity, map literal structure, recur position, etc.
  3. **Transformer** (`CljElixir.Transformer`) - Converts CljElixir AST into Elixir
     AST (`{operation, metadata, arguments}` tuples).
  4. **Elixir Compiler** - `Code.eval_quoted/3` or `Code.compile_quoted/2` handles
     macro expansion, protocol consolidation, and BEAM bytecode generation.
  """

  @doc """
  Compile a CljElixir source string to Elixir AST.

  Runs the full pipeline: read -> analyze -> transform.
  Returns `{:ok, elixir_ast}` on success, or `{:error, diagnostics}` on failure.

  ## Options

    * `:file` - the source file path for error reporting (default: `"nofile"`)
  """
  @spec compile_string(String.t(), keyword()) :: {:ok, term()} | {:error, list()}
  def compile_string(source, opts \\ []) do
    file = opts[:file] || "nofile"
    # The first failing stage's {:error, diagnostics} falls through unchanged.
    with {:ok, forms} <- read(source, file),
         {:ok, forms} <- analyze(forms, opts),
         {:ok, elixir_ast} <- transform(forms, opts) do
      {:ok, elixir_ast}
    end
  end

  @doc """
  Compile a `.clje` file to Elixir AST.

  Reads the file from disk and delegates to `compile_string/2` with the
  `:file` option set automatically.
  Returns `{:ok, elixir_ast}` on success, or `{:error, diagnostics}` on failure.
  A file that cannot be read yields an `{:error, diagnostics}` tuple (it is
  reported as a diagnostic rather than raising).
  """
  @spec compile_file(Path.t(), keyword()) :: {:ok, term()} | {:error, list()}
  def compile_file(path, opts \\ []) do
    case File.read(path) do
      {:ok, source} ->
        compile_string(source, Keyword.put(opts, :file, path))
      {:error, reason} ->
        {:error,
         [
           %{
             severity: :error,
             message: "could not read file #{path}: #{:file.format_error(reason)}",
             file: path,
             line: 0,
             col: 0
           }
         ]}
    end
  end

  @doc """
  Compile and evaluate a CljElixir source string.

  Compiles the source to Elixir AST and evaluates it via `Code.eval_quoted/3`.
  Returns `{:ok, result, bindings}` on success, or `{:error, diagnostics}` on failure.

  ## Options

    * `:file` - the source file path for error reporting (default: `"nofile"`)
    * `:bindings` - variable bindings for evaluation (default: `[]`)
    * `:env` - the macro environment for evaluation
  """
  @spec eval_string(String.t(), keyword()) :: {:ok, term(), keyword()} | {:error, list()}
  def eval_string(source, opts \\ []) do
    with {:ok, ast} <- compile_string(source, opts) do
      try do
        bindings = opts[:bindings] || []
        env_opts = build_eval_opts(opts)
        {result, new_bindings} = Code.eval_quoted(ast, bindings, env_opts)
        {:ok, result, new_bindings}
      rescue
        # NOTE(review): rescuing every exception converts runtime crashes in
        # evaluated user code into diagnostics; deliberate for a compiler
        # entry point, but it also hides bugs in the transformer's output.
        e ->
          file = opts[:file] || "nofile"
          {:error,
           [
             %{
               severity: :error,
               message: format_eval_error(e),
               file: file,
               line: extract_line(e),
               col: 0
             }
           ]}
      end
    end
  end

  @doc """
  Compile and evaluate a `.clje` file.

  Reads the file from disk and delegates to `eval_string/2`.
  Returns `{:ok, result, bindings}` on success, or `{:error, diagnostics}` on failure.
  A file that cannot be read yields an `{:error, diagnostics}` tuple.
  """
  @spec eval_file(Path.t(), keyword()) :: {:ok, term(), keyword()} | {:error, list()}
  def eval_file(path, opts \\ []) do
    case File.read(path) do
      {:ok, source} ->
        eval_string(source, Keyword.put(opts, :file, path))
      {:error, reason} ->
        {:error,
         [
           %{
             severity: :error,
             message: "could not read file #{path}: #{:file.format_error(reason)}",
             file: path,
             line: 0,
             col: 0
           }
         ]}
    end
  end

  @doc """
  Compile a CljElixir source string to in-memory BEAM modules.

  Compiles the source to Elixir AST and then uses `Code.compile_quoted/2` to
  produce BEAM bytecode modules. No `.beam` files are written here; see
  `compile_file_to_beam/2` with `:output_dir` for writing files to disk.
  Returns `{:ok, [{module, binary}]}` on success, or `{:error, diagnostics}` on failure.

  ## Options

    * `:file` - the source file path for error reporting (default: `"nofile"`)
  """
  @spec compile_to_beam(String.t(), keyword()) ::
          {:ok, [{module(), binary()}]} | {:error, list()}
  def compile_to_beam(source, opts \\ []) do
    with {:ok, ast} <- compile_string(source, opts) do
      try do
        file = opts[:file] || "nofile"
        modules = Code.compile_quoted(ast, file)
        {:ok, modules}
      rescue
        e ->
          file = opts[:file] || "nofile"
          {:error,
           [
             %{
               severity: :error,
               message: "BEAM compilation failed: #{format_eval_error(e)}",
               file: file,
               line: extract_line(e),
               col: 0
             }
           ]}
      end
    end
  end

  @doc """
  Compile a `.clje` file to BEAM modules and write .beam files to the given
  output directory.

  Returns `{:ok, [{module, binary}]}` on success, or `{:error, diagnostics}` on failure.

  ## Options

    * `:output_dir` - directory to write .beam files (default: does not write)
  """
  @spec compile_file_to_beam(Path.t(), keyword()) ::
          {:ok, [{module(), binary()}]} | {:error, list()}
  def compile_file_to_beam(path, opts \\ []) do
    case File.read(path) do
      {:ok, source} ->
        opts = Keyword.put(opts, :file, path)
        with {:ok, modules} <- compile_to_beam(source, opts) do
          case opts[:output_dir] do
            nil ->
              {:ok, modules}
            output_dir ->
              write_beam_files(modules, output_dir)
          end
        end
      {:error, reason} ->
        {:error,
         [
           %{
             severity: :error,
             message: "could not read file #{path}: #{:file.format_error(reason)}",
             file: path,
             line: 0,
             col: 0
           }
         ]}
    end
  end

  # ---------------------------------------------------------------------------
  # Private helpers
  # ---------------------------------------------------------------------------
  # Reader stage: normalize any reader failure into a diagnostic list.
  defp read(source, file) do
    case CljElixir.Reader.read_string(source) do
      {:ok, forms} ->
        {:ok, forms}
      {:error, reason} when is_binary(reason) ->
        {:error,
         [%{severity: :error, message: "read error: #{reason}", file: file, line: 0, col: 0}]}
      {:error, reason} ->
        {:error,
         [
           %{
             severity: :error,
             message: "read error: #{inspect(reason)}",
             file: file,
             line: 0,
             col: 0
           }
         ]}
    end
  end

  # Analyzer stage: pass through analyzer diagnostics; wrap anything else.
  defp analyze(forms, _opts) do
    case CljElixir.Analyzer.analyze(forms) do
      {:ok, analyzed_forms} ->
        {:ok, analyzed_forms}
      {:error, diagnostics} when is_list(diagnostics) ->
        {:error, diagnostics}
      {:error, reason} ->
        {:error,
         [%{severity: :error, message: "analysis error: #{inspect(reason)}", line: 0, col: 0}]}
    end
  end

  # Transformer stage: the transformer raises on failure, so convert any
  # exception into a diagnostic here.
  defp transform(forms, opts) do
    try do
      ctx =
        if opts[:vector_as_list] do
          %CljElixir.Transformer.Context{vector_as_list: true}
        else
          %CljElixir.Transformer.Context{}
        end
      elixir_ast = CljElixir.Transformer.transform(forms, ctx)
      {:ok, elixir_ast}
    rescue
      e ->
        {:error,
         [
           %{
             severity: :error,
             message: "transform error: #{format_eval_error(e)}",
             line: 0,
             col: 0
           }
         ]}
    end
  end

  # Builds the third argument for Code.eval_quoted/3.
  # NOTE(review): when `:env` is given it is folded into the options list;
  # `Code.eval_quoted/3` documents either an options list or a `Macro.Env`
  # struct as its third argument — confirm an `:env` key inside the options
  # list is actually honored, or pass the env struct itself.
  defp build_eval_opts(opts) do
    eval_opts = [file: opts[:file] || "nofile"]
    case opts[:env] do
      nil -> eval_opts
      env -> Keyword.put(eval_opts, :env, env)
    end
  end

  # Exception.message/1 can itself raise for badly-behaved exception structs;
  # the function-level rescue falls back to inspect/1 in that case.
  defp format_eval_error(%{__struct__: struct} = e) when is_atom(struct) do
    Exception.message(e)
  rescue
    _ -> inspect(e)
  end
  defp format_eval_error(e), do: inspect(e)

  # Best-effort line extraction from exceptions that carry a :line field.
  defp extract_line(%{line: line}) when is_integer(line), do: line
  defp extract_line(_), do: 0

  # Writes one <Module>.beam file per compiled module (module names include
  # the "Elixir." prefix, matching standard BEAM file naming).
  defp write_beam_files(modules, output_dir) do
    File.mkdir_p!(output_dir)
    Enum.each(modules, fn {module, binary} ->
      beam_path = Path.join(output_dir, "#{module}.beam")
      File.write!(beam_path, binary)
    end)
    {:ok, modules}
  end
end
+112
View File
@@ -0,0 +1,112 @@
defmodule CljElixir.Equality do
  @moduledoc """
  Cross-type equality for CljElixir.

  Handles the case where `(= [1 2 3] '(1 2 3))` should return true —
  PersistentVector and list are both sequential types with the same elements.
  """

  @doc """
  Structural equality across CljElixir sequential types.

  Returns `true` when `a` and `b` are term-identical, or when both are
  sequential collections (list, `PersistentVector`, `SubVector`) with equal
  elements in the same order. Falls back to `==` for all other terms.
  """
  def equiv(a, b) when a === b, do: true
  def equiv(%CljElixir.PersistentVector{} = a, %CljElixir.PersistentVector{} = b) do
    # Cheap O(1) count comparison first; only realize both element lists
    # when the counts agree.
    a.cnt == b.cnt and
      CljElixir.PersistentVector.to_list(a) == CljElixir.PersistentVector.to_list(b)
  end
  def equiv(%CljElixir.PersistentVector{} = a, b) when is_list(b) do
    CljElixir.PersistentVector.to_list(a) == b
  end
  def equiv(a, %CljElixir.PersistentVector{} = b) when is_list(a) do
    a == CljElixir.PersistentVector.to_list(b)
  end
  def equiv(%CljElixir.SubVector{} = a, b) do
    CljElixir.SubVector.sv_to_list(a) |> equiv_list(b)
  end
  def equiv(a, %CljElixir.SubVector{} = b) do
    equiv_list(CljElixir.SubVector.sv_to_list(b), a)
  end
  def equiv(a, b), do: a == b

  # Compare a realized element list against another sequential collection.
  defp equiv_list(list, other) when is_list(other), do: list == other
  defp equiv_list(list, %CljElixir.PersistentVector{} = pv) do
    list == CljElixir.PersistentVector.to_list(pv)
  end
  # Fix: two distinct-but-structurally-equal SubVectors previously fell
  # through to the `false` clause because no equiv_list clause handled a
  # SubVector on the right-hand side.
  defp equiv_list(list, %CljElixir.SubVector{} = sv) do
    list == CljElixir.SubVector.sv_to_list(sv)
  end
  defp equiv_list(_, _), do: false
end
defimpl Enumerable, for: CljElixir.PersistentVector do
  # Enumerable support for the persistent vector: counting is O(1) via the
  # cached element count; reduction and slicing realize elements through the
  # PersistentVector API.

  def count(vector), do: {:ok, vector.cnt}

  # No fast membership test — defer to the protocol's reduce-based default.
  def member?(_vector, _element), do: {:error, __MODULE__}

  def reduce(_vector, {:halt, accumulated}, _fun), do: {:halted, accumulated}

  def reduce(vector, {:suspend, accumulated}, fun) do
    {:suspended, accumulated, fn command -> reduce(vector, command, fun) end}
  end

  def reduce(vector, {:cont, accumulated}, fun) do
    vector
    |> CljElixir.PersistentVector.to_list()
    |> Enumerable.List.reduce({:cont, accumulated}, fun)
  end

  def slice(vector) do
    {:ok, vector.cnt, fn start, length, step -> take_slice(vector, start, length, step) end}
  end

  # Collect `length` elements starting at `start`, `step` apart, by index.
  defp take_slice(vector, start, length, step) do
    last = start + (length - 1) * step

    for index <- start..last//step do
      CljElixir.PersistentVector.pv_nth(vector, index)
    end
  end
end
defimpl Collectable, for: CljElixir.PersistentVector do
  # Allows `Enum.into/2` to target a PersistentVector: each element is
  # conj-ed onto the accumulator; :halt discards the partial result.
  def into(initial) do
    {initial, &collect/2}
  end

  defp collect(acc, {:cont, element}), do: CljElixir.PersistentVector.pv_conj(acc, element)
  defp collect(acc, :done), do: acc
  defp collect(_acc, :halt), do: :ok
end
defimpl Enumerable, for: Tuple do
  # Enumerable support for tuples: counting is O(1) via tuple_size/1;
  # reduction converts to a list once, and slicing indexes with elem/2.

  def count(tuple), do: {:ok, tuple_size(tuple)}

  # No fast membership test — defer to the protocol's reduce-based default.
  def member?(_tuple, _element), do: {:error, __MODULE__}

  def reduce(_tuple, {:halt, accumulated}, _fun), do: {:halted, accumulated}

  def reduce(tuple, {:suspend, accumulated}, fun) do
    {:suspended, accumulated, fn command -> reduce(tuple, command, fun) end}
  end

  def reduce(tuple, {:cont, accumulated}, fun) do
    tuple
    |> Tuple.to_list()
    |> Enumerable.List.reduce({:cont, accumulated}, fun)
  end

  def slice(tuple) do
    {:ok, tuple_size(tuple), fn start, length, step -> take_slice(tuple, start, length, step) end}
  end

  # Collect `length` elements starting at `start`, `step` apart, by index.
  defp take_slice(tuple, start, length, step) do
    last = start + (length - 1) * step
    for index <- start..last//step, do: elem(tuple, index)
  end
end
defimpl Collectable, for: Tuple do
  # Allows `Enum.into/2` to target a tuple: each element is appended with
  # :erlang.append_element/2; :halt discards the partial result.
  def into(initial) do
    {initial, &collect/2}
  end

  defp collect(acc, {:cont, element}), do: :erlang.append_element(acc, element)
  defp collect(acc, :done), do: acc
  defp collect(_acc, :halt), do: :ok
end
+368
View File
@@ -0,0 +1,368 @@
defmodule CljElixir.Malli do
@moduledoc """
Converts Malli-style schema data to Elixir typespec AST.
Takes plain Elixir terms (atoms, lists, maps) representing Malli schemas
and produces Elixir AST nodes suitable for `@spec` and `@type` attributes.
## Public API
* `spec_ast/2` - Generate `@spec` AST nodes from a function schema
* `type_ast/2,3` - Generate `@type` AST nodes from a type schema
* `schema_to_typespec/2` - Convert a schema to its typespec AST (the type part only)
"""
# Atoms that need quoted syntax in source but are valid at runtime
@arrow :"=>"
@optional_marker :"?"
# ── Public API ──────────────────────────────────────────────────────
@doc """
Generates a list of `@spec` AST nodes for the given function name and schema.
`schema` is either `[:=> ...]` for a single-arity function or
`[:function ...]` for a multi-arity function.
Returns a list because `:function` schemas and optional params
can produce multiple `@spec` entries.
"""
@spec spec_ast(atom(), list(), keyword()) :: list()
def spec_ast(fun_name, schema, opts \\ [])
def spec_ast(fun_name, [:function | clauses], opts) do
Enum.flat_map(clauses, fn clause -> spec_ast(fun_name, clause, opts) end)
end
def spec_ast(fun_name, [@arrow, [:cat | param_schemas], return_schema], opts) do
ret_ast = schema_to_typespec(return_schema, opts)
param_groups = expand_optional_params(param_schemas)
Enum.map(param_groups, fn params ->
param_asts = Enum.map(params, &schema_to_typespec(&1, opts))
wrap_spec(fun_name, param_asts, ret_ast)
end)
end
@doc """
Generates a `@type` AST node for the given type name and schema.
Accepts an optional `opts` keyword list with `:known_types` for cross-references.
For schemas with a `:registry` key, generates multiple types from the registry.
"""
@spec type_ast(atom(), list(), keyword()) :: tuple() | list()
def type_ast(type_name, schema, opts \\ [])
def type_ast(type_name, [:schema, %{registry: registry}, ref_schema], opts) do
type_ast_registry(type_name, [:schema, %{registry: registry}, ref_schema], registry, opts)
end
def type_ast(type_name, schema, opts) do
type_body = schema_to_typespec(schema, opts)
wrap_type(type_name, type_body)
end
@doc """
Generates a list of `@type` AST nodes, one for each entry in the registry.
`registry_types` is a map of `{name_atom => schema}` pairs or a list of
`{name_atom, schema}` tuples.
"""
def type_ast_registry(_type_name, [:schema, %{registry: _}, _ref], registry_types, opts) when is_map(registry_types) do
Enum.map(registry_types, fn {name, schema} ->
clean_name = clean_registry_name(name)
body = schema_to_typespec(schema, Keyword.put(opts, :registry, registry_types))
wrap_type(clean_name, body)
end)
end
def type_ast_registry(_type_name, [:schema, %{registry: _}, _ref], registry_types, opts) when is_list(registry_types) do
Enum.map(registry_types, fn {name, schema} ->
clean_name = clean_registry_name(name)
body = schema_to_typespec(schema, Keyword.put(opts, :registry, Map.new(registry_types)))
wrap_type(clean_name, body)
end)
end
# ── schema_to_typespec ──────────────────────────────────────────────
@doc """
Converts a schema to its typespec AST representation (the type part,
not the `@type` wrapper).

## Options

  * `:known_types` - map of `%{"User" => :user, ...}` for cross-schema references
  * `:registry` - map of registry types for resolving `:ref` references
"""
@spec schema_to_typespec(term(), keyword()) :: term()
def schema_to_typespec(schema, opts \\ [])
# ── Primitives ──────────────────────────────────────────────────────
# Each Malli primitive keyword maps to the zero-arity Elixir built-in type
# AST `{name, [], []}`.
def schema_to_typespec(:string, _opts), do: string_t_ast()
def schema_to_typespec(:int, _opts), do: {:integer, [], []}
def schema_to_typespec(:integer, _opts), do: {:integer, [], []}
def schema_to_typespec(:float, _opts), do: {:float, [], []}
def schema_to_typespec(:number, _opts), do: {:number, [], []}
def schema_to_typespec(:boolean, _opts), do: {:boolean, [], []}
def schema_to_typespec(:atom, _opts), do: {:atom, [], []}
# Malli :keyword maps onto Elixir's atom() type.
def schema_to_typespec(:keyword, _opts), do: {:atom, [], []}
def schema_to_typespec(:any, _opts), do: {:any, [], []}
# nil schema is the literal nil type (also covered by the atom clause below).
def schema_to_typespec(:nil, _opts), do: nil
def schema_to_typespec(:pid, _opts), do: {:pid, [], []}
def schema_to_typespec(:port, _opts), do: {:port, [], []}
def schema_to_typespec(:reference, _opts), do: {:reference, [], []}
# Clojure-style hyphenated names map to Elixir's sized-integer types.
def schema_to_typespec(:"pos-int", _opts), do: {:pos_integer, [], []}
def schema_to_typespec(:"neg-int", _opts), do: {:neg_integer, [], []}
def schema_to_typespec(:"nat-int", _opts), do: {:non_neg_integer, [], []}
# ── Schema references (string keys) ────────────────────────────────
# A binary name is a cross-schema reference resolved via :known_types;
# unknown names degrade to any().
def schema_to_typespec(name, opts) when is_binary(name) do
  known = Keyword.get(opts, :known_types, %{})
  case Map.fetch(known, name) do
    {:ok, type_name} -> {type_name, [], []}
    :error -> {:any, [], []}
  end
end
# ── Literal values (atoms that aren't schema keywords) ──────────────
# Bare atoms and integers are literal singleton types in typespecs.
def schema_to_typespec(atom, _opts) when is_atom(atom), do: atom
def schema_to_typespec(int, _opts) when is_integer(int), do: int
# ── Compound and container types (list schemas) ─────────────────────
def schema_to_typespec([head | _rest] = schema, opts) do
  convert_list_schema(head, schema, opts)
end
# ── Fallback ───────────────────────────────────────────────────────
def schema_to_typespec(_, _opts), do: {:any, [], []}
# ── List schema dispatch ────────────────────────────────────────────
# Convert one compound (list-form) Malli schema to a typespec AST,
# dispatching on the schema head. Unknown heads fall through to any().
# [:or a b c] -> a | b | c (right-associated union)
defp convert_list_schema(:or, [_ | types], opts) do
  type_asts = Enum.map(types, &schema_to_typespec(&1, opts))
  right_assoc_union(type_asts)
end
# [:and ...] -> the most specific single expressible type (see resolve_and_type/2).
defp convert_list_schema(:and, [_ | schemas], opts) do
  resolve_and_type(schemas, opts)
end
# [:maybe t] -> t | nil
defp convert_list_schema(:maybe, [:maybe, schema], opts) do
  inner = schema_to_typespec(schema, opts)
  {:|, [], [inner, nil]}
end
# [:enum v1 v2 ...] -> v1 | v2 | ... — values are embedded as literal types.
# NOTE(review): only atoms and integers are valid literal types in specs;
# string/float enum values would yield an invalid spec — confirm callers.
defp convert_list_schema(:enum, [_ | values], _opts) do
  right_assoc_union(values)
end
# [:= v] -> the literal value itself (same validity caveat as :enum above).
defp convert_list_schema(:=, [:=, value], _opts), do: value
# [:map [k schema] [k {:optional true} schema] ...] -> %{k: type, ...}
# NOTE(review): optional entries are emitted identically to required ones;
# the resulting map spec treats every key as required — confirm intended.
defp convert_list_schema(:map, [_ | field_specs], opts) do
  fields =
    Enum.map(field_specs, fn
      [name, {:optional, true}, schema] ->
        {name, schema_to_typespec(schema, opts)}
      [name, schema] ->
        {name, schema_to_typespec(schema, opts)}
    end)
  {:%{}, [], fields}
end
# [:map-of k v] -> %{optional(k_type) => v_type}
defp convert_list_schema(:"map-of", [_, key_schema, val_schema], opts) do
  key_ast = schema_to_typespec(key_schema, opts)
  val_ast = schema_to_typespec(val_schema, opts)
  {:%{}, [], [{{:optional, [], [key_ast]}, val_ast}]}
end
# [:list t] -> [t_type] (a one-element list is list-of-type in spec AST)
defp convert_list_schema(:list, [:list, elem_schema], opts) do
  [schema_to_typespec(elem_schema, opts)]
end
# [:vector ...] -> CljElixir.PersistentVector.t() (element type is dropped).
defp convert_list_schema(:vector, _schema, _opts) do
  persistent_vector_t_ast()
end
# [:set ...] -> MapSet.t() (element type is dropped).
defp convert_list_schema(:set, _schema, _opts) do
  mapset_t_ast()
end
# [:tuple a b ...] -> {a_type, b_type, ...}
defp convert_list_schema(:tuple, [_ | elem_schemas], opts) do
  elems = Enum.map(elem_schemas, &schema_to_typespec(&1, opts))
  {:{}, [], elems}
end
# [:ref name] -> local type reference. Prefer a registry match (raw or
# cleaned name), then :known_types; otherwise emit the cleaned name anyway
# and trust that the type is defined elsewhere.
defp convert_list_schema(:ref, [:ref, name], opts) do
  clean = clean_registry_name(name)
  registry = Keyword.get(opts, :registry, %{})
  if Map.has_key?(registry, name) or Map.has_key?(registry, clean) do
    {clean, [], []}
  else
    known = Keyword.get(opts, :known_types, %{})
    case Map.fetch(known, name) do
      {:ok, type_name} -> {type_name, [], []}
      :error -> {clean, [], []}
    end
  end
end
# [:schema {:registry ...} ref] -> convert ref with the inline registry in
# scope (replaces any registry already in opts for the nested conversion).
defp convert_list_schema(:schema, [:schema, %{registry: registry}, ref_schema], opts) do
  merged_opts = Keyword.put(opts, :registry, registry)
  schema_to_typespec(ref_schema, merged_opts)
end
# Bare comparison schemas carry no expressible type on their own.
defp convert_list_schema(:>, _, _opts), do: {:any, [], []}
defp convert_list_schema(:>=, _, _opts), do: {:any, [], []}
defp convert_list_schema(:<, _, _opts), do: {:any, [], []}
defp convert_list_schema(:<=, _, _opts), do: {:any, [], []}
# Function schemas: [@arrow [:cat p1 p2 ...] ret] -> (p1, p2 -> ret).
# The match on `schema` asserts the [:cat ...] shape and crashes on any
# other arrow form (let-it-crash) — TODO confirm no other arrow shapes exist.
defp convert_list_schema(head, schema, opts) when head == @arrow do
  [@arrow, [:cat | params], ret] = schema
  param_asts = Enum.map(params, &schema_to_typespec(&1, opts))
  ret_ast = schema_to_typespec(ret, opts)
  [{:->, [], [param_asts, ret_ast]}]
end
# Unknown compound head -> any().
defp convert_list_schema(_, _, _opts), do: {:any, [], []}
# ── Private helpers ─────────────────────────────────────────────────
# Quoted form of `String.t()`.
defp string_t_ast, do: remote_t_ast([:String])

# Quoted form of `MapSet.t()`.
defp mapset_t_ast, do: remote_t_ast([:MapSet])

# Quoted form of `CljElixir.PersistentVector.t()`.
defp persistent_vector_t_ast, do: remote_t_ast([:CljElixir, :PersistentVector])

# Builds the quoted remote type call `Alias.t()` for the given alias segments.
defp remote_t_ast(segments) do
  {{:., [], [{:__aliases__, [alias: false], segments}, :t]}, [], []}
end
# Builds a right-associated union type AST: [a, b, c] -> a | (b | c).
# An empty list previously raised FunctionClauseError (reachable from an
# empty [:or] or [:enum] schema); it now degrades to any(), consistent with
# this module's fallback for unexpressible schemas.
defp right_assoc_union([]), do: {:any, [], []}
defp right_assoc_union([single]), do: single
defp right_assoc_union([first | rest]) do
  {:|, [], [first, right_assoc_union(rest)]}
end
# Wraps a function signature and return type into a quoted `@spec` attribute:
# `@spec fun_name(params...) :: ret`.
defp wrap_spec(fun_name, param_asts, ret_ast) do
  signature = {fun_name, [], param_asts}
  typed_signature = {:"::", [], [signature, ret_ast]}
  {:@, [], [{:spec, [], [typed_signature]}]}
end
# Wraps a type name and body into a quoted `@type` attribute:
# `@type type_name() :: body`.
defp wrap_type(type_name, body_ast) do
  declaration = {:"::", [], [{type_name, [], []}, body_ast]}
  {:@, [], [{:type, [], [declaration]}]}
end
# Strips the "Elixir.CljElixir." module prefix or a leading "::" from a
# registry name atom so it can serve as a local type name. Non-atoms pass
# through untouched. NOTE: names originate from compile-time schemas, so
# String.to_atom/1 on them does not create atoms from untrusted input.
defp clean_registry_name(name) when is_atom(name) do
  name
  |> Atom.to_string()
  |> String.replace(~r/^(Elixir\.CljElixir\.|::)/, "")
  |> String.to_atom()
end

defp clean_registry_name(other), do: other
# Expand optional params into all param combinations.
# E.g., [:string, [:"?", :string], [:"?", :int]] produces:
# [[:string], [:string, :string], [:string, :string, :int]]
# Expand optional params into all param-count combinations.
# E.g., [:string, [:"?", :string], [:"?", :int]] produces:
#   [[:string], [:string, :string], [:string, :string, :int]]
defp expand_optional_params(param_schemas) do
  {required, optionals} = split_required_optional(param_schemas)

  Enum.map(0..length(optionals), fn count ->
    required ++ Enum.take(optionals, count)
  end)
end
# Split a parameter list into {required, optionals} at the first
# @optional_marker entry; everything before it is required.
defp split_required_optional(params) do
  split_required_optional(params, [])
end
# First optional marker found: collect its schema plus all later optionals.
# NOTE(review): extract_optionals/1 silently DROPS any non-optional param
# that appears after the first optional one — such schemas are likely
# invalid anyway, but the error is hidden; confirm this is intended.
defp split_required_optional([[@optional_marker, schema] | rest], req_acc) do
  optionals = [schema | extract_optionals(rest)]
  {Enum.reverse(req_acc), optionals}
end
# Still in the required prefix: accumulate (reversed; fixed up on return).
defp split_required_optional([param | rest], req_acc) do
  split_required_optional(rest, [param | req_acc])
end
# No optional marker at all: every param is required.
defp split_required_optional([], req_acc) do
  {Enum.reverse(req_acc), []}
end
# Collects the schemas of all [@optional_marker, schema] entries, in order,
# skipping anything else (the generator pattern filters non-matches).
defp extract_optionals(params) do
  for [@optional_marker, schema] <- params, do: schema
end
# Resolve :and types — extract most specific expressible type.
# Special cases: [:and :int [:> 0]] -> pos_integer()
# [:and :int [:>= 0]] -> non_neg_integer()
defp resolve_and_type(schemas, opts) do
  # Partition the conjuncts into expressible base types and numeric
  # comparison constraints; anything else is ignored.
  base_types = Enum.filter(schemas, &recognized_schema?/1)
  constraints = Enum.filter(schemas, &constraint?/1)
  case {base_types, constraints} do
    # Only the exact forms [:and :int [:> 0]] / [:and :int [:>= 0]] refine
    # the type; other constraints (e.g. [:< 100]) are dropped.
    {[:int], [[:>, 0]]} -> {:pos_integer, [], []}
    {[:integer], [[:>, 0]]} -> {:pos_integer, [], []}
    {[:int], [[:>=, 0]]} -> {:non_neg_integer, [], []}
    {[:integer], [[:>=, 0]]} -> {:non_neg_integer, [], []}
    # Multiple base types: use the first one (an :and of distinct base
    # types is not expressible as an intersection in typespecs).
    {[base | _], _} -> schema_to_typespec(base, opts)
    {[], _} -> {:any, [], []}
  end
end
# Primitive schema keywords that schema_to_typespec/2 maps directly to
# builtin types; used by recognized_schema?/1 when resolving :and.
@primitive_types [
  :string, :int, :integer, :float, :number, :boolean, :atom, :keyword,
  :any, :nil, :pid, :port, :reference, :"pos-int", :"neg-int", :"nat-int"
]
# Heads of compound (list-form) schemas; @arrow is checked separately in
# recognized_schema?/1 because it is not a literal here.
@compound_heads [:or, :and, :maybe, :enum, :=, :map, :"map-of",
                 :list, :vector, :set, :tuple, :ref, :schema]
# True when the value is a schema this module can express as a concrete
# type: a primitive keyword, or a list headed by a known compound keyword
# (including the function-arrow head).
defp recognized_schema?(candidate) do
  case candidate do
    schema when is_atom(schema) -> schema in @primitive_types
    [head | _] when is_atom(head) -> head == @arrow or head in @compound_heads
    _ -> false
  end
end
# True for a two-element numeric comparison schema such as [:> 0].
defp constraint?([op, _bound]) when op in [:>, :>=, :<, :<=], do: true
defp constraint?(_), do: false
end
+647
View File
@@ -0,0 +1,647 @@
defmodule CljElixir.Reader do
@moduledoc """
Reader for CljElixir: tokenizes source text and parses it into CljElixir AST.
The reader has two phases:
1. Tokenizer — converts source text into a flat list of tokens
2. Parser — recursive descent over the token list, producing CljElixir AST nodes
## AST representation
Literals are themselves: integers, floats, strings, booleans, nil, atoms (keywords).
Compound forms use tagged tuples:
{:symbol, meta, name}
{:list, meta, [elements]}
{:vector, meta, [elements]}
{:map, meta, [k1, v1, k2, v2, ...]}
{:set, meta, [elements]}
{:tuple, meta, [elements]}
{:regex, meta, pattern}
{:quote, meta, form}
{:with_meta, meta, {metadata, target}}
{:anon_fn, meta, body}
{:quasiquote, meta, form}
{:unquote, meta, form}
{:splice_unquote, meta, form}
{:deref, meta, form}
"""
alias CljElixir.Reader.Token
# ── Public API ──────────────────────────────────────────────────────
@doc """
Read a string of CljElixir source into a list of AST forms.
Returns `{:ok, [form]}` on success, `{:error, message}` on failure.
"""
@spec read_string(String.t()) :: {:ok, list()} | {:error, String.t()}
def read_string(source) when is_binary(source) do
  # Tokenize first; a tokenizer {:error, _} falls straight through the
  # `with` and is returned unchanged.
  with {:ok, tokens} <- tokenize(source) do
    parse_all(tokens, [])
  end
end
# ════════════════════════════════════════════════════════════════════
# TOKENIZER
# ════════════════════════════════════════════════════════════════════
@doc false
def tokenize(source) do
  # Start scanning at line 1, column 1 with an empty token accumulator.
  source
  |> String.to_charlist()
  |> tokenize_loop(1, 1, [])
end
# ---------- end of input ----------
# Main tokenizer loop: consumes the charlist one lexeme at a time,
# tracking 1-based line/col for token positions. Tokens are accumulated
# in reverse and flipped once at the end. CLAUSE ORDER IS SIGNIFICANT:
# multi-character dispatches (#el[, #{, #(, #", ~@) must precede their
# single-character prefixes.
defp tokenize_loop([], _line, _col, acc), do: {:ok, Enum.reverse(acc)}
# ---------- newline ----------
defp tokenize_loop([?\n | rest], line, _col, acc),
  do: tokenize_loop(rest, line + 1, 1, acc)
# \r\n counts as a single newline.
defp tokenize_loop([?\r, ?\n | rest], line, _col, acc),
  do: tokenize_loop(rest, line + 1, 1, acc)
defp tokenize_loop([?\r | rest], line, _col, acc),
  do: tokenize_loop(rest, line + 1, 1, acc)
# ---------- whitespace / commas (commas are whitespace, Clojure-style) ----------
defp tokenize_loop([c | rest], line, col, acc) when c in [?\s, ?\t, ?,],
  do: tokenize_loop(rest, line, col + 1, acc)
# ---------- comments ----------
defp tokenize_loop([?; | rest], line, _col, acc) do
  rest = skip_comment(rest)
  # skip_comment stops at (but does not consume) the newline or EOF.
  # Let the main loop's newline handler increment line/col.
  tokenize_loop(rest, line, 1, acc)
end
# ---------- strings ----------
defp tokenize_loop([?" | rest], line, col, acc) do
  # The token records the opening quote's position; scanning resumes at
  # the reader-reported end line/col (strings may span lines).
  case read_string_literal(rest, line, col + 1, []) do
    {:ok, value, rest2, end_line, end_col} ->
      token = %Token{type: :string, value: value, line: line, col: col}
      tokenize_loop(rest2, end_line, end_col, [token | acc])
    {:error, msg} ->
      {:error, msg}
  end
end
# ---------- dispatch sequences: #{ #el[ #( #" ----------
defp tokenize_loop([?#, ?e, ?l, ?[ | rest], line, col, acc) do
  token = %Token{type: :hash_el_lbracket, value: "#el[", line: line, col: col}
  tokenize_loop(rest, line, col + 4, [token | acc])
end
defp tokenize_loop([?#, ?{ | rest], line, col, acc) do
  token = %Token{type: :hash_lbrace, value: "\#{", line: line, col: col}
  tokenize_loop(rest, line, col + 2, [token | acc])
end
defp tokenize_loop([?#, ?( | rest], line, col, acc) do
  token = %Token{type: :hash_lparen, value: "#(", line: line, col: col}
  tokenize_loop(rest, line, col + 2, [token | acc])
end
# Regex literal #"..." — reuses the string reader for the pattern body.
defp tokenize_loop([?#, ?" | rest], line, col, acc) do
  case read_string_literal(rest, line, col + 2, []) do
    {:ok, value, rest2, end_line, end_col} ->
      token = %Token{type: :hash_string, value: value, line: line, col: col}
      tokenize_loop(rest2, end_line, end_col, [token | acc])
    {:error, msg} ->
      {:error, msg}
  end
end
# ---------- splice-unquote ~@ (must come before unquote ~) ----------
defp tokenize_loop([?~, ?@ | rest], line, col, acc) do
  token = %Token{type: :splice_unquote, value: "~@", line: line, col: col}
  tokenize_loop(rest, line, col + 2, [token | acc])
end
# ---------- unquote ~ ----------
defp tokenize_loop([?~ | rest], line, col, acc) do
  token = %Token{type: :unquote, value: "~", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
# ---------- delimiters ----------
defp tokenize_loop([?( | rest], line, col, acc) do
  token = %Token{type: :lparen, value: "(", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
defp tokenize_loop([?) | rest], line, col, acc) do
  token = %Token{type: :rparen, value: ")", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
defp tokenize_loop([?[ | rest], line, col, acc) do
  token = %Token{type: :lbracket, value: "[", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
defp tokenize_loop([?] | rest], line, col, acc) do
  token = %Token{type: :rbracket, value: "]", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
defp tokenize_loop([?{ | rest], line, col, acc) do
  token = %Token{type: :lbrace, value: "{", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
defp tokenize_loop([?} | rest], line, col, acc) do
  token = %Token{type: :rbrace, value: "}", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
# ---------- quote ' ----------
defp tokenize_loop([?' | rest], line, col, acc) do
  token = %Token{type: :quote, value: "'", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
# ---------- quasiquote ` ----------
defp tokenize_loop([?` | rest], line, col, acc) do
  token = %Token{type: :quasiquote, value: "`", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
# ---------- metadata ^ ----------
defp tokenize_loop([?^ | rest], line, col, acc) do
  token = %Token{type: :meta, value: "^", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
# ---------- deref @ ----------
defp tokenize_loop([?@ | rest], line, col, acc) do
  token = %Token{type: :deref, value: "@", line: line, col: col}
  tokenize_loop(rest, line, col + 1, [token | acc])
end
# ---------- keywords ----------
defp tokenize_loop([?: | rest], line, col, acc) do
  # NOTE(review): read_keyword does not report an updated line number, so a
  # quoted keyword containing a newline would leave `line` stale — confirm.
  case read_keyword(rest, line, col) do
    {:ok, kw_value, rest2, end_col} ->
      token = %Token{type: :keyword, value: kw_value, line: line, col: col}
      tokenize_loop(rest2, line, end_col, [token | acc])
    {:error, msg} ->
      {:error, msg}
  end
end
# ---------- negative numbers: -<digit> ----------
# Since whitespace is always consumed before reaching tokenize_loop,
# a standalone `-` followed by a digit is always a negative number literal.
# The `-` inside symbol names (like `my-func`) is consumed by the symbol reader
# and never reaches this clause as a standalone character.
defp tokenize_loop([?- | rest], line, col, acc) do
  if starts_with_digit?(rest) do
    # Seed the digit accumulator with `-` so the parsed value is negative.
    {:ok, token, rest2, end_col} = read_number(rest, line, col + 1, [?-])
    token = %{token | line: line, col: col}
    tokenize_loop(rest2, line, end_col, [token | acc])
  else
    # It's a symbol starting with -
    case read_symbol([?- | rest], line, col) do
      {:ok, token, rest2, end_col} ->
        tokenize_loop(rest2, line, end_col, [token | acc])
    end
  end
end
# ---------- numbers ----------
defp tokenize_loop([c | _] = chars, line, col, acc) when c in ?0..?9 do
  {:ok, token, rest2, end_col} = read_number(chars, line, col, [])
  tokenize_loop(rest2, line, end_col, [token | acc])
end
# ---------- symbols (and true/false/nil) ----------
defp tokenize_loop([c | _] = chars, line, col, acc)
     when c in ?a..?z or c in ?A..?Z or
            c == ?_ or c == ?* or c == ?! or c == ?? or
            c == ?< or c == ?> or c == ?= or c == ?+ or
            c == ?. or c == ?& or c == ?% do
  case read_symbol(chars, line, col) do
    {:ok, token, rest, end_col} ->
      tokenize_loop(rest, line, end_col, [token | acc])
  end
end
# ---------- catch-all: unexpected character ----------
# Anything not matched above (e.g. a standalone `/` or `#`) is an error.
defp tokenize_loop([c | _], line, col, _acc) do
  {:error, "Unexpected character '#{<<c::utf8>>}' at line #{line}, col #{col}"}
end
# ── Tokenizer helpers ───────────────────────────────────────────────
# Characters that can continue a symbol (after the start)
# True for characters allowed after the first character of a symbol.
# Note the continuation set is wider than the start set: it also admits
# digits, `-`, `/`, and `#`.
defp symbol_continue_char?(c) do
  c in ?a..?z or c in ?A..?Z or c in ?0..?9 or
    c in [?_, ?*, ?!, ??, ?<, ?>, ?=, ?+, ?-, ?/, ?., ?%, ?&, ?#]
end
# True when the charlist begins with an ASCII digit.
defp starts_with_digit?(chars), do: match?([c | _] when c in ?0..?9, chars)
# Drops characters up to — but not including — the next newline (\n or \r)
# or end of input, so the main loop's newline handling stays in charge of
# line accounting.
defp skip_comment(chars), do: Enum.drop_while(chars, &(&1 not in [?\n, ?\r]))
# ── String literal reader ──────────────────────────────────────────
# Reads the body of a double-quoted string (opening quote already consumed),
# returning {:ok, value, rest, end_line, end_col} positioned just past the
# closing quote. Supported escapes: \" \\ \n \t \r. An unrecognized escape
# such as \q is NOT special-cased: the backslash and the following char are
# each consumed literally by the generic clause. Strings may span lines.
defp read_string_literal([], line, _col, _acc),
  do: {:error, "Unterminated string starting at line #{line}"}
# Closing quote: flip the accumulator into a binary.
defp read_string_literal([?" | rest], line, col, acc),
  do: {:ok, IO.chardata_to_string(Enum.reverse(acc)), rest, line, col + 1}
defp read_string_literal([?\\, ?" | rest], line, col, acc),
  do: read_string_literal(rest, line, col + 2, [?" | acc])
defp read_string_literal([?\\, ?\\ | rest], line, col, acc),
  do: read_string_literal(rest, line, col + 2, [?\\ | acc])
defp read_string_literal([?\\, ?n | rest], line, col, acc),
  do: read_string_literal(rest, line, col + 2, [?\n | acc])
defp read_string_literal([?\\, ?t | rest], line, col, acc),
  do: read_string_literal(rest, line, col + 2, [?\t | acc])
defp read_string_literal([?\\, ?r | rest], line, col, acc),
  do: read_string_literal(rest, line, col + 2, [?\r | acc])
# A raw newline inside the string is kept and bumps the line counter.
defp read_string_literal([?\n | rest], line, _col, acc),
  do: read_string_literal(rest, line + 1, 1, [?\n | acc])
defp read_string_literal([c | rest], line, col, acc),
  do: read_string_literal(rest, line, col + 1, [c | acc])
# ── Keyword reader ─────────────────────────────────────────────────
# Quoted keyword: :"some-name"
# Quoted keyword: :"some-name" — the name may contain any string content.
# NOTE(review): the reader discards `_end_line`, so a quoted keyword that
# spans lines leaves the caller's line counter stale — confirm acceptable.
# NOTE: String.to_atom/1 here creates atoms from source text; source is
# compile-time input, not untrusted runtime data.
defp read_keyword([?" | rest], line, col) do
  case read_string_literal(rest, line, col + 2, []) do
    {:ok, value, rest2, _end_line, end_col} ->
      {:ok, String.to_atom(value), rest2, end_col}
    {:error, msg} ->
      {:error, msg}
  end
end
# Regular keyword: :name, :my-key, :ok
defp read_keyword(chars, _line, col) do
  {name_chars, rest} = take_keyword_chars(chars, [])
  case name_chars do
    [] ->
      {:error, "Expected keyword name after ':'"}
    _ ->
      # name_chars is accumulated in reverse; end_col accounts for the
      # leading colon plus the name length (all name chars are ASCII).
      name = IO.chardata_to_string(Enum.reverse(name_chars))
      atom_val = String.to_atom(name)
      {:ok, atom_val, rest, col + 1 + length(name_chars)}
  end
end
# Accumulates (reversed) the characters allowed inside a bare keyword name.
defp take_keyword_chars([c | rest], acc) when c in ?a..?z or c in ?A..?Z or c in ?0..?9 or c in [?_, ?-, ?!, ??, ?., ?/, ?*, ?+, ?>, ?<, ?=, ?&, ?#],
  do: take_keyword_chars(rest, [c | acc])
defp take_keyword_chars(rest, acc), do: {acc, rest}
# ── Number reader ──────────────────────────────────────────────────
# Reads an integer or float token starting at `chars`. `prefix` carries any
# already-consumed characters (currently only `-` for negatives); `col` has
# already advanced past the prefix, which is why end_col subtracts
# length(prefix). A dot NOT followed by a digit terminates the integer and
# is left in the stream for the next token (so "1.foo" reads as 1 then the
# symbol ".foo"). The `{_, ""} = ...Parse` matches are assertions: the
# accumulated chars are digits (plus optional sign/dot), so a partial parse
# is a reader bug.
defp read_number(chars, line, col, prefix) do
  {digit_chars, rest} = take_digits(chars, prefix)
  case rest do
    [?. | after_dot] ->
      case after_dot do
        [d | _] when d in ?0..?9 ->
          # Fractional part present: continue accumulating past the dot.
          {frac_chars, rest2} = take_digits(after_dot, [?. | digit_chars])
          str = IO.chardata_to_string(Enum.reverse(frac_chars))
          {float_val, ""} = Float.parse(str)
          end_col = col + String.length(str) - length(prefix)
          token = %Token{type: :float, value: float_val, line: line, col: col}
          {:ok, token, rest2, end_col}
        _ ->
          # dot not followed by digit — just an integer, leave dot for next token
          str = IO.chardata_to_string(Enum.reverse(digit_chars))
          {int_val, ""} = Integer.parse(str)
          end_col = col + String.length(str) - length(prefix)
          token = %Token{type: :integer, value: int_val, line: line, col: col}
          {:ok, token, rest, end_col}
      end
    _ ->
      str = IO.chardata_to_string(Enum.reverse(digit_chars))
      {int_val, ""} = Integer.parse(str)
      end_col = col + String.length(str) - length(prefix)
      token = %Token{type: :integer, value: int_val, line: line, col: col}
      {:ok, token, rest, end_col}
  end
end
# Accumulates (reversed, onto `acc`) a run of ASCII digits.
defp take_digits([c | rest], acc) when c in ?0..?9,
  do: take_digits(rest, [c | acc])
defp take_digits(rest, acc), do: {acc, rest}
# ── Symbol reader ──────────────────────────────────────────────────
# Reads a symbol token, special-casing the literals true/false/nil into
# their own token types. Always succeeds (the caller guarantees the first
# character is a valid symbol start).
defp read_symbol(chars, line, col) do
  {sym_chars, rest} = take_symbol_chars(chars, [])
  name = sym_chars |> Enum.reverse() |> IO.chardata_to_string()

  {type, value} =
    case name do
      "true" -> {:boolean, true}
      "false" -> {:boolean, false}
      "nil" -> {:nil, nil}
      other -> {:symbol, other}
    end

  token = %Token{type: type, value: value, line: line, col: col}
  {:ok, token, rest, col + String.length(name)}
end
# Accumulates (reversed) the characters of a symbol. The first character is
# checked against the narrower start set; every later character against the
# continuation set.
defp take_symbol_chars([], collected), do: {collected, []}

defp take_symbol_chars([char | rest] = remaining, collected) do
  allowed? =
    if collected == [] do
      symbol_start_char?(char)
    else
      symbol_continue_char?(char)
    end

  if allowed? do
    take_symbol_chars(rest, [char | collected])
  else
    {collected, remaining}
  end
end
# True for characters that may begin a symbol (no digits, `-`, `/`, or `#`;
# those are only valid as continuation characters).
defp symbol_start_char?(c) do
  c in ?a..?z or c in ?A..?Z or
    c in [?_, ?*, ?!, ??, ?<, ?>, ?=, ?+, ?-, ?., ?&, ?%]
end
# ════════════════════════════════════════════════════════════════════
# PARSER — Recursive Descent
# ════════════════════════════════════════════════════════════════════
# Parse all top-level forms until tokens are exhausted
# Parses every top-level form until the token stream is exhausted.
# Forms accumulate in reverse and are flipped once at the end; any parse
# error propagates unchanged through the `with`.
defp parse_all([], forms), do: {:ok, Enum.reverse(forms)}

defp parse_all(tokens, forms) do
  with {:ok, form, remaining} <- parse_form(tokens) do
    parse_all(remaining, [form | forms])
  end
end
# ── Parse a single form ────────────────────────────────────────────
# Literals
# Parses exactly one form from the token stream, returning
# {:ok, form, remaining_tokens} or {:error, message}. Literal tokens
# become bare Elixir values; compound forms become the tagged tuples
# documented in the moduledoc, each carrying %{line:, col:} metadata
# taken from the opening token.
defp parse_form([%Token{type: :integer, value: v} | rest]),
  do: {:ok, v, rest}
defp parse_form([%Token{type: :float, value: v} | rest]),
  do: {:ok, v, rest}
defp parse_form([%Token{type: :string, value: v} | rest]),
  do: {:ok, v, rest}
defp parse_form([%Token{type: :keyword, value: v} | rest]),
  do: {:ok, v, rest}
defp parse_form([%Token{type: :boolean, value: v} | rest]),
  do: {:ok, v, rest}
defp parse_form([%Token{type: :nil} | rest]),
  do: {:ok, nil, rest}
# Symbol
defp parse_form([%Token{type: :symbol, value: name, line: l, col: c} | rest]),
  do: {:ok, {:symbol, %{line: l, col: c}, name}, rest}
# List ( ... )
defp parse_form([%Token{type: :lparen, line: l, col: c} | rest]) do
  case parse_until(rest, :rparen) do
    {:ok, elements, rest2} ->
      {:ok, {:list, %{line: l, col: c}, elements}, rest2}
    {:error, _} = err ->
      err
  end
end
# Vector [ ... ]
defp parse_form([%Token{type: :lbracket, line: l, col: c} | rest]) do
  case parse_until(rest, :rbracket) do
    {:ok, elements, rest2} ->
      {:ok, {:vector, %{line: l, col: c}, elements}, rest2}
    {:error, _} = err ->
      err
  end
end
# Map { ... } — elements are a flat [k1, v1, k2, v2, ...] list; pairing is
# not validated here (an odd count is left for later phases to reject).
defp parse_form([%Token{type: :lbrace, line: l, col: c} | rest]) do
  case parse_until(rest, :rbrace) do
    {:ok, elements, rest2} ->
      {:ok, {:map, %{line: l, col: c}, elements}, rest2}
    {:error, _} = err ->
      err
  end
end
# Set #{ ... }
defp parse_form([%Token{type: :hash_lbrace, line: l, col: c} | rest]) do
  case parse_until(rest, :rbrace) do
    {:ok, elements, rest2} ->
      {:ok, {:set, %{line: l, col: c}, elements}, rest2}
    {:error, _} = err ->
      err
  end
end
# BEAM tuple #el[ ... ]
defp parse_form([%Token{type: :hash_el_lbracket, line: l, col: c} | rest]) do
  case parse_until(rest, :rbracket) do
    {:ok, elements, rest2} ->
      {:ok, {:tuple, %{line: l, col: c}, elements}, rest2}
    {:error, _} = err ->
      err
  end
end
# Anonymous function #( ... ) — the body is wrapped as a list form so later
# phases can treat it like a call.
defp parse_form([%Token{type: :hash_lparen, line: l, col: c} | rest]) do
  case parse_until(rest, :rparen) do
    {:ok, elements, rest2} ->
      body = {:list, %{line: l, col: c}, elements}
      {:ok, {:anon_fn, %{line: l, col: c}, body}, rest2}
    {:error, _} = err ->
      err
  end
end
# Regex #"..." — the pattern is kept as the raw string; no compilation here.
defp parse_form([%Token{type: :hash_string, value: pattern, line: l, col: c} | rest]),
  do: {:ok, {:regex, %{line: l, col: c}, pattern}, rest}
# Quote ' — wraps the next form.
defp parse_form([%Token{type: :quote, line: l, col: c} | rest]) do
  case parse_form(rest) do
    {:ok, form, rest2} ->
      {:ok, {:quote, %{line: l, col: c}, form}, rest2}
    {:error, _} = err ->
      err
  end
end
# Quasiquote `
defp parse_form([%Token{type: :quasiquote, line: l, col: c} | rest]) do
  case parse_form(rest) do
    {:ok, form, rest2} ->
      {:ok, {:quasiquote, %{line: l, col: c}, form}, rest2}
    {:error, _} = err ->
      err
  end
end
# Unquote ~
defp parse_form([%Token{type: :unquote, line: l, col: c} | rest]) do
  case parse_form(rest) do
    {:ok, form, rest2} ->
      {:ok, {:unquote, %{line: l, col: c}, form}, rest2}
    {:error, _} = err ->
      err
  end
end
# Splice-unquote ~@
defp parse_form([%Token{type: :splice_unquote, line: l, col: c} | rest]) do
  case parse_form(rest) do
    {:ok, form, rest2} ->
      {:ok, {:splice_unquote, %{line: l, col: c}, form}, rest2}
    {:error, _} = err ->
      err
  end
end
# Deref @
defp parse_form([%Token{type: :deref, line: l, col: c} | rest]) do
  case parse_form(rest) do
    {:ok, form, rest2} ->
      {:ok, {:deref, %{line: l, col: c}, form}, rest2}
    {:error, _} = err ->
      err
  end
end
# Metadata ^ — reads the metadata value, then the target form it decorates.
defp parse_form([%Token{type: :meta, line: l, col: c} | rest]) do
  case parse_meta_value(rest, l, c) do
    {:ok, meta_form, rest2} ->
      case parse_form(rest2) do
        {:ok, target, rest3} ->
          {:ok, {:with_meta, %{line: l, col: c}, {meta_form, target}}, rest3}
        {:error, _} = err ->
          err
      end
    {:error, _} = err ->
      err
  end
end
# Unexpected token (e.g. a stray closing delimiter at top level).
defp parse_form([%Token{type: type, line: l, col: c} | _]),
  do: {:error, "Unexpected token #{type} at line #{l}, col #{c}"}
defp parse_form([]),
  do: {:error, "Unexpected end of input"}
# ── Parse helpers ──────────────────────────────────────────────────
# Parse elements until a closing delimiter token type is found
# Parses forms until a token of type `closer` is found (the closer itself is
# consumed). Returns {:ok, elements_in_order, remaining_tokens} or an error
# when input ends before the closer appears.
defp parse_until(tokens, closer), do: parse_until_loop(tokens, closer, [])

defp parse_until_loop([], closer, _collected) do
  {:error, "Unexpected end of input, expected '#{delimiter_name(closer)}'"}
end

# Non-linear pattern: the head token's type must equal `closer`.
defp parse_until_loop([%Token{type: closer} | rest], closer, collected) do
  {:ok, Enum.reverse(collected), rest}
end

defp parse_until_loop(tokens, closer, collected) do
  with {:ok, form, rest} <- parse_form(tokens) do
    parse_until_loop(rest, closer, [form | collected])
  end
end
# Parse the value after ^ (metadata)
# ^{...} — map metadata
# Parse the value after ^ (metadata). Three surface forms are accepted.
# ^{...} — explicit map metadata
defp parse_meta_value([%Token{type: :lbrace, line: l, col: c} | rest], _ml, _mc) do
  case parse_until(rest, :rbrace) do
    {:ok, elements, rest2} ->
      {:ok, {:map, %{line: l, col: c}, elements}, rest2}
    {:error, _} = err ->
      err
  end
end
# ^:keyword — sugar for ^{:keyword true}; desugared right here into a map form.
defp parse_meta_value([%Token{type: :keyword, value: kw, line: l, col: c} | rest], _ml, _mc) do
  meta_map = {:map, %{line: l, col: c}, [kw, true]}
  {:ok, meta_map, rest}
end
# ^symbol — sugar for ^{:tag symbol}
# NOTE(review): unlike the keyword case, the symbol is NOT desugared into a
# {:tag symbol} map here — the bare symbol form is returned as-is, so a
# later phase must perform that wrapping. Confirm downstream handles it.
defp parse_meta_value([%Token{type: :symbol} | _] = tokens, _ml, _mc) do
  case parse_form(tokens) do
    {:ok, form, rest} -> {:ok, form, rest}
    {:error, _} = err -> err
  end
end
# Anything else after ^ is malformed metadata.
defp parse_meta_value(_tokens, ml, mc) do
  {:error, "Expected metadata value (map, keyword, or symbol) at line #{ml}, col #{mc}"}
end
# Human-readable character for a closing-delimiter token type (error messages).
defp delimiter_name(closer) do
  case closer do
    :rparen -> ")"
    :rbracket -> "]"
    :rbrace -> "}"
  end
end
end
+14
View File
@@ -0,0 +1,14 @@
defmodule CljElixir.Reader.Token do
  @moduledoc """
  A token produced by the CljElixir tokenizer.
  Types:
    :integer, :float, :string, :keyword, :symbol, :boolean, :nil,
    :lparen, :rparen, :lbracket, :rbracket, :lbrace, :rbrace,
    :hash_lbrace, :hash_el_lbracket, :hash_lparen, :hash_string,
    :quote, :quasiquote, :unquote, :splice_unquote,
    :meta, :deref
  """
  # type  — one of the token-type atoms listed in the moduledoc
  # value — literal payload (parsed number/string/atom) or the raw lexeme text
  # line  — 1-based source line of the token's first character
  # col   — 1-based source column of the token's first character
  defstruct [:type, :value, :line, :col]
end
File diff suppressed because it is too large Load Diff
+336
View File
@@ -0,0 +1,336 @@
defmodule Mix.Tasks.Compile.CljElixir do
@moduledoc """
Mix compiler plugin for CljElixir `.clje` files.
Integrates `.clje` source files into the standard Mix build pipeline.
Supports incremental compilation via a manifest that tracks source file
modification times and the modules they produce.
## Configuration
In your `mix.exs`, add `:compile.clj_elixir` to your compilers and
configure source paths:
def project do
[
compilers: [:clj_elixir] ++ Mix.compilers(),
clj_elixir_paths: ["src"]
]
end
## How It Works
1. Scans configured source paths for `.clje` files
2. Checks the manifest for previously compiled files and their mtimes
3. Compiles only stale files (new or modified since last build)
4. Writes `.beam` files to the build output directory
5. Updates the manifest with new module info
6. Returns `{:ok, diagnostics}` or `{:error, diagnostics}`
## Manifest
The manifest is stored at `_build/ENV/.clj_elixir_manifest` and tracks
`{source_path, mtime, [module_names]}` tuples for incremental compilation.
"""
use Mix.Task.Compiler
@manifest_filename ".clj_elixir_manifest"
@recursive true
@impl true
# Mix compiler entry point. Scans configured paths for .clje sources,
# recompiles only stale files (tracked via the manifest), cleans up modules
# whose sources were deleted, and reports Mix diagnostics.
# Flags: --force/-f recompiles everything; --verbose/-v logs per-file info.
def run(argv) do
  {opts, _, _} =
    OptionParser.parse(argv,
      switches: [force: :boolean, verbose: :boolean],
      aliases: [f: :force, v: :verbose]
    )
  force? = opts[:force] || false
  verbose? = opts[:verbose] || false
  project = Mix.Project.config()
  source_paths = project[:clj_elixir_paths] || ["src"]
  build_path = Mix.Project.compile_path(project)
  # Ensure build directory exists
  File.mkdir_p!(build_path)
  # Find all .clje source files
  sources = find_sources(source_paths)
  if sources == [] do
    {:noop, []}
  else
    manifest_path = manifest_path()
    manifest = load_manifest(manifest_path)
    # Determine which files need recompilation
    {stale, removed} = partition_sources(sources, manifest, force?)
    if stale == [] and removed == [] do
      if verbose?, do: Mix.shell().info("All .clje files are up to date")
      {:noop, []}
    else
      # Clean up modules from removed source files
      removed_diagnostics = clean_removed(removed, manifest, build_path, verbose?)
      # Compile stale files
      {compiled, diagnostics} = compile_stale(stale, build_path, verbose?)
      # Build new manifest from existing (unchanged) + newly compiled
      # entries; files that failed to compile simply drop out and will be
      # retried (still stale) on the next run.
      unchanged_entries =
        manifest
        |> Enum.reject(fn {path, _mtime, _modules} ->
          path in stale or path in removed
        end)
      new_manifest = unchanged_entries ++ compiled
      save_manifest(manifest_path, new_manifest)
      all_diagnostics = removed_diagnostics ++ diagnostics
      has_errors? = Enum.any?(all_diagnostics, &(&1.severity == :error))
      if has_errors? do
        {:error, to_mix_diagnostics(all_diagnostics)}
      else
        {:ok, to_mix_diagnostics(all_diagnostics)}
      end
    end
  end
end
@impl true
# Mix uses this to know which manifest files this compiler owns.
def manifests, do: [manifest_path()]
@impl true
# Removes every .beam file recorded in the manifest, then the manifest itself.
def clean do
  path = manifest_path()
  build = Mix.Project.compile_path()

  for {_source, _mtime, modules} <- load_manifest(path), module <- modules do
    File.rm(Path.join(build, "#{module}.beam"))
  end

  File.rm(path)
  :ok
end
# ---------------------------------------------------------------------------
# Source discovery
# ---------------------------------------------------------------------------
# Finds every .clje file under the configured source paths and orders them
# for compilation.
defp find_sources(paths) do
  paths
  |> Enum.flat_map(fn path ->
    path
    |> Path.join("**/*.clje")
    |> Path.wildcard()
  end)
  |> Enum.sort_by(fn path ->
    parts = Path.split(path)
    basename = Path.basename(path, ".clje")
    # Protocols must compile first (priority 0), then everything else (priority 1)
    priority = if basename == "protocols", do: 0, else: 1
    # NOTE(review): `-length(parts)` dominates the sort, so deeper-nested
    # files always compile first and the protocols priority only breaks
    # ties among files at the SAME depth — a shallow protocols.clje would
    # not compile first. Confirm this depth-first ordering is intended.
    {-length(parts), priority, path}
  end)
end
# ---------------------------------------------------------------------------
# Staleness detection
# ---------------------------------------------------------------------------
# Splits sources into {stale, removed}: stale files are new-or-modified
# (or everything when force?); removed paths appear in the manifest but no
# longer exist on disk.
defp partition_sources(sources, manifest, force?) do
  recorded_mtimes = Map.new(manifest, fn {path, mtime, _modules} -> {path, mtime} end)

  stale =
    if force? do
      sources
    else
      Enum.filter(sources, fn source ->
        case recorded_mtimes do
          # Known file: stale only when its mtime advanced past the record.
          %{^source => old_mtime} -> file_mtime(source) > old_mtime
          # Never compiled before.
          _ -> true
        end
      end)
    end

  known = MapSet.new(sources)

  removed =
    for {path, _mtime, _modules} <- manifest,
        not MapSet.member?(known, path),
        do: path

  {stale, removed}
end
# ---------------------------------------------------------------------------
# Compilation
# ---------------------------------------------------------------------------
# Compiles each stale source in order, collecting manifest entries for the
# successes and diagnostics from every attempt. Entry order follows the
# source order; diagnostics are appended in compile order.
defp compile_stale(sources, build_path, verbose?) do
  sources
  |> Enum.map(fn source ->
    if verbose?, do: Mix.shell().info("Compiling #{source}")
    compile_source(source, build_path)
  end)
  |> Enum.reduce({[], []}, fn
    {:ok, entry, diags}, {entries, all_diags} -> {[entry | entries], all_diags ++ diags}
    {:error, diags}, {entries, all_diags} -> {entries, all_diags ++ diags}
  end)
  |> then(fn {entries, all_diags} -> {Enum.reverse(entries), all_diags} end)
end
# Compiles a single .clje file to .beam in build_path.
# Returns {:ok, manifest_entry, diagnostics} (with a warning diagnostic when
# the file produced no modules) or {:error, diagnostics} with each
# diagnostic tagged with the source file path.
defp compile_source(source, build_path) do
  case CljElixir.Compiler.compile_file_to_beam(source,
         output_dir: build_path,
         vector_as_list: true
       ) do
    {:ok, modules} ->
      # mtime is read AFTER a successful compile, so an edit made mid-build
      # will be seen as newer next run and trigger a recompile.
      mtime = file_mtime(source)
      module_names = Enum.map(modules, fn {mod, _binary} -> mod end)
      entry = {source, mtime, module_names}
      diagnostics =
        if module_names == [] do
          [
            %{
              severity: :warning,
              message: "#{source} produced no modules",
              file: source,
              line: 0,
              col: 0
            }
          ]
        else
          []
        end
      {:ok, entry, diagnostics}
    {:error, diagnostics} ->
      # Attach the file path without clobbering one the compiler already set.
      enriched =
        Enum.map(diagnostics, fn diag ->
          Map.put_new(diag, :file, source)
        end)
      {:error, enriched}
  end
end
# ---------------------------------------------------------------------------
# Cleanup
# ---------------------------------------------------------------------------
# For each source path that disappeared, deletes the .beam files its modules
# produced and purges those modules from the running code server.
# NOTE(review): both branches yield [], so this always returns an empty
# diagnostics list even though the caller binds it as removed_diagnostics —
# confirm whether removal was meant to emit an informational diagnostic.
defp clean_removed(removed, manifest, build_path, verbose?) do
  manifest_map = Map.new(manifest, fn {path, mtime, modules} -> {path, {mtime, modules}} end)
  Enum.flat_map(removed, fn path ->
    case Map.get(manifest_map, path) do
      nil ->
        []
      {_mtime, modules} ->
        if verbose?, do: Mix.shell().info("Cleaning removed source #{path}")
        Enum.each(modules, fn module ->
          beam_file = Path.join(build_path, "#{module}.beam")
          File.rm(beam_file)
          # Purge the module from the code server
          :code.purge(module)
          :code.delete(module)
        end)
        []
    end
  end)
end
# ---------------------------------------------------------------------------
# Manifest I/O
# ---------------------------------------------------------------------------
# Full path of this compiler's manifest inside the Mix build directory.
defp manifest_path do
  Mix.Project.manifest_path()
  |> Path.join(@manifest_filename)
end
# Loads and validates the manifest; any read or decode failure yields []
# (forcing a full recompile rather than crashing the build).
# NOTE: :erlang.binary_to_term/1 without [:safe] is acceptable here only
# because the manifest is a locally written build artifact, not untrusted
# input ([:safe] would also reject module atoms that are not yet loaded).
defp load_manifest(path) do
  case File.read(path) do
    {:ok, contents} ->
      try do
        contents
        |> :erlang.binary_to_term()
        |> validate_manifest()
      rescue
        # Corrupt/truncated manifest: treat as absent.
        _ -> []
      end
    {:error, _} ->
      []
  end
end
# Keeps only well-formed {path, mtime, modules} manifest entries; the
# generator pattern plus the guard filter drop anything malformed.
defp validate_manifest(entries) when is_list(entries) do
  for {path, mtime, modules} <- entries,
      is_binary(path) and is_integer(mtime) and is_list(modules) do
    {path, mtime, modules}
  end
end

defp validate_manifest(_), do: []
# Serializes the manifest entries to disk, creating the directory if needed.
defp save_manifest(path, entries) do
  path |> Path.dirname() |> File.mkdir_p!()
  File.write!(path, :erlang.term_to_binary(entries))
end
# ---------------------------------------------------------------------------
# Diagnostics
# ---------------------------------------------------------------------------
# Converts this compiler's plain diagnostic maps into the
# Mix.Task.Compiler.Diagnostic structs that Mix expects.
defp to_mix_diagnostics(diagnostics) do
  for diag <- diagnostics do
    %Mix.Task.Compiler.Diagnostic{
      file: Map.get(diag, :file, "unknown"),
      severity: diag.severity,
      message: diag.message,
      position: diag.line,
      compiler_name: "clj_elixir"
    }
  end
end
# ---------------------------------------------------------------------------
# File utilities
# ---------------------------------------------------------------------------
# POSIX mtime of a file; 0 when the file cannot be stat'ed (so an
# unreadable file always compares as older than any recorded mtime).
defp file_mtime(path) do
  with {:ok, %{mtime: mtime}} <- File.stat(path, time: :posix) do
    mtime
  else
    {:error, _} -> 0
  end
end
end
+27
View File
@@ -0,0 +1,27 @@
# Mix project definition for the CljElixir language and compiler.
defmodule CljElixir.MixProject do
  use Mix.Project

  # Project configuration. `:clj_elixir` is appended to the default
  # compiler chain so `.clje` sources are built by the custom Mix compiler
  # task after the regular Elixir compilers run.
  def project do
    [
      app: :clj_elixir,
      version: "0.1.0",
      elixir: "~> 1.16",
      start_permanent: Mix.env() == :prod,
      compilers: Mix.compilers() ++ [:clj_elixir],
      deps: deps(),
      elixirc_paths: ["lib"],
      test_paths: ["test"],
      # Directories scanned for .clje sources — presumably read by the
      # :clj_elixir compiler task; verify against the task implementation.
      clj_elixir_paths: ["src"]
    ]
  end

  # OTP application configuration.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # No external dependencies — compiler is self-contained.
  defp deps do
    []
  end
end
+20
View File
@@ -0,0 +1,20 @@
;; CljElixir Core Functions
;; Compound functions that dispatch through protocols
(defmodule CljElixir.Core
  ;; get-in: walk the key path `ks` into nested structure `m`, one
  ;; ILookup/lookup per key.  Enum.reduce calls the fn as (element, acc),
  ;; hence the [k acc] parameter order.
  (defn get-in [m ks]
    (Enum/reduce ks m (fn [k acc] (CljElixir.ILookup/lookup acc k))))
  ;; assoc-in: set the value at key path `ks`, rebuilding each level via
  ;; IAssociative/assoc as the recursion unwinds.
  (defn assoc-in [m ks v]
    (let [k (hd ks)
          rest-ks (tl ks)]
      (if (= rest-ks [])
        (CljElixir.IAssociative/assoc m k v)
        (CljElixir.IAssociative/assoc m k (assoc-in (CljElixir.ILookup/lookup m k) rest-ks v)))))
  ;; update-in: like assoc-in, but the leaf value is produced by applying
  ;; the one-argument function `f` to the existing value at the path.
  (defn update-in [m ks f]
    (let [k (hd ks)
          rest-ks (tl ks)]
      (if (= rest-ks [])
        (CljElixir.IAssociative/assoc m k (erlang/apply f [(CljElixir.ILookup/lookup m k)]))
        (CljElixir.IAssociative/assoc m k (update-in (CljElixir.ILookup/lookup m k) rest-ks f))))))
+251
View File
@@ -0,0 +1,251 @@
;; CljElixir PersistentVector — bit-partitioned trie
;; Ported from ClojureScript's PersistentVector
;;
;; BEAM adaptations:
;; - "Arrays" are BEAM tuples (immutable, O(1) indexed access via elem)
;; - aset = put-elem (returns new tuple)
;; - Tail grows via :erlang/append-element
;;
;; NOTE: Uses Map/get for struct field access (not keyword-as-function)
;; because ILookup is not yet implemented for these struct types.
;; ---------------------------------------------------------------------------
;; VectorNode — trie node containing up to 32 children
;; ---------------------------------------------------------------------------
;; Trie node: `arr` is a 32-slot tuple of children (interior) or values
;; (leaf); `edit` mirrors the ClojureScript field and is always nil in the
;; code visible here — no transient support yet.
(defrecord CljElixir.VectorNode [edit arr])
;; ---------------------------------------------------------------------------
;; PersistentVector — bit-partitioned trie with O(log32 n) indexed access
;; ---------------------------------------------------------------------------
(defrecord CljElixir.PersistentVector [meta cnt shift root tail]
  ;; --- Constants as zero-arity functions ---
  ;; A fresh interior node with 32 empty child slots.
  (defn empty-node []
    (CljElixir.VectorNode/new nil (erlang/make-tuple 32 nil)))
  ;; The empty vector: count 0, shift 5 (one trie level), empty tail.
  (defn empty-vec []
    (CljElixir.PersistentVector/new nil 0 5 (empty-node) (erlang/make-tuple 0 nil)))
  ;; --- Internal helpers ---
  ;; Index of the first element stored in the tail: cnt rounded down to a
  ;; multiple of 32. Everything below this index lives in the trie.
  (defn- tail-off [cnt]
    (if (< cnt 32)
      0
      (erlang/bsl (erlang/bsr (dec cnt) 5) 5)))
  ;; Returns the 32-slot array (tuple) holding index i — either the tail,
  ;; or the leaf reached by walking the trie 5 bits at a time.
  ;; Throws when i is outside [0, cnt).
  (defn- array-for [pv i]
    (let [cnt (Map/get pv :cnt)]
      (if (and (>= i 0) (< i cnt))
        (if (>= i (tail-off cnt))
          (Map/get pv :tail)
          (loop [node (Map/get pv :root)
                 level (Map/get pv :shift)]
            (if (> level 0)
              (let [child-idx (erlang/band (erlang/bsr i level) 31)]
                (recur (elem (Map/get node :arr) child-idx)
                       (- level 5)))
              (Map/get node :arr))))
        (throw (str "Index " i " out of bounds for vector of size " cnt)))))
  ;; --- Nth ---
  ;; Element at index i; the 3-arity form returns not-found instead of
  ;; throwing on an out-of-range index.
  (defn pv-nth
    ([pv i]
     (let [node (array-for pv i)]
       (elem node (erlang/band i 31))))
    ([pv i not-found]
     (if (and (>= i 0) (< i (Map/get pv :cnt)))
       (pv-nth pv i)
       not-found)))
  ;; --- Path operations ---
  ;; Builds a left-spine of nodes `level/5` deep whose bottom is `node`.
  (defn- new-path [level node]
    (if (= level 0)
      node
      (let [new-arr (put-elem (erlang/make-tuple 32 nil) 0 (new-path (- level 5) node))]
        (CljElixir.VectorNode/new nil new-arr))))
  ;; Pushes a full tail node into the trie under `parent`, returning the
  ;; replacement for `parent`.
  (defn- push-tail [cnt level parent tail-node]
    (let [subidx (erlang/band (erlang/bsr (dec cnt) level) 31)
          parent-arr (Map/get parent :arr)
          node-to-insert
          (if (= level 5)
            tail-node
            (let [child (elem parent-arr subidx)]
              (if (not (nil? child))
                (push-tail cnt (- level 5) child tail-node)
                (new-path (- level 5) tail-node))))
          new-arr (put-elem parent-arr subidx node-to-insert)]
      (CljElixir.VectorNode/new nil new-arr)))
  ;; --- Conj (append) ---
  ;; Append `val`: grow the tail when it has room; otherwise push the full
  ;; tail into the trie (adding a root level on overflow) and start a new
  ;; single-element tail.
  (defn pv-conj [pv val]
    (let [cnt (Map/get pv :cnt)
          tail (Map/get pv :tail)
          tail-len (tuple-size tail)
          meta (Map/get pv :meta)
          shift (Map/get pv :shift)
          root (Map/get pv :root)]
      (if (< tail-len 32)
        ;; Room in tail
        (CljElixir.PersistentVector/new
          meta (inc cnt) shift root
          (erlang/append-element tail val))
        ;; Tail full — push into trie
        (let [tail-node (CljElixir.VectorNode/new nil tail)
              ;; Overflow test from ClojureScript: root is full when
              ;; (cnt >> 5) > (1 << shift).
              overflow? (> (erlang/bsr cnt 5) (erlang/bsl 1 shift))]
          (if overflow?
            ;; New root level
            (let [new-arr (put-elem
                            (put-elem (erlang/make-tuple 32 nil) 0 root)
                            1 (new-path shift tail-node))]
              (CljElixir.PersistentVector/new
                meta (inc cnt) (+ shift 5)
                (CljElixir.VectorNode/new nil new-arr)
                (erlang/make-tuple 1 val)))
            ;; Room at current depth
            (CljElixir.PersistentVector/new
              meta (inc cnt) shift
              (push-tail cnt shift root tail-node)
              (erlang/make-tuple 1 val)))))))
  ;; --- Assoc (update at index) ---
  ;; Path-copies the trie down to the leaf containing index i.
  (defn- do-assoc [level node i val]
    (let [node-arr (Map/get node :arr)
          node-edit (Map/get node :edit)]
      (if (= level 0)
        (CljElixir.VectorNode/new node-edit
          (put-elem node-arr (erlang/band i 31) val))
        (let [subidx (erlang/band (erlang/bsr i level) 31)
              new-child (do-assoc (- level 5) (elem node-arr subidx) i val)]
          (CljElixir.VectorNode/new node-edit
            (put-elem node-arr subidx new-child))))))
  ;; Replace index i (in tail or trie); i == cnt appends; otherwise throws.
  (defn pv-assoc [pv i val]
    (let [cnt (Map/get pv :cnt)]
      (cond
        (and (>= i 0) (< i cnt))
        (if (>= i (tail-off cnt))
          (CljElixir.PersistentVector/new
            (Map/get pv :meta) cnt (Map/get pv :shift) (Map/get pv :root)
            (put-elem (Map/get pv :tail) (erlang/band i 31) val))
          (CljElixir.PersistentVector/new
            (Map/get pv :meta) cnt (Map/get pv :shift)
            (do-assoc (Map/get pv :shift) (Map/get pv :root) i val)
            (Map/get pv :tail)))
        (= i cnt)
        (pv-conj pv val)
        true
        (throw (str "Index " i " out of bounds for assoc on vector of size " cnt)))))
  ;; --- Pop (remove last) ---
  ;; Removes the last leaf's slot from the trie; returns nil when the
  ;; subtree becomes empty so parents can prune it.
  (defn- pop-tail [cnt level node]
    (let [subidx (erlang/band (erlang/bsr (dec cnt) level) 31)
          node-arr (Map/get node :arr)
          node-edit (Map/get node :edit)]
      (cond
        (> level 5)
        (let [new-child (pop-tail cnt (- level 5) (elem node-arr subidx))]
          (if (and (nil? new-child) (= subidx 0))
            nil
            (CljElixir.VectorNode/new node-edit
              (put-elem node-arr subidx new-child))))
        (= subidx 0)
        nil
        true
        (CljElixir.VectorNode/new node-edit
          (put-elem node-arr subidx nil)))))
  ;; Remove the last element: shrink the tail, or pull the previous leaf
  ;; out of the trie as the new tail (collapsing an empty root level).
  (defn pv-pop [pv]
    (let [cnt (Map/get pv :cnt)]
      (cond
        (= cnt 0)
        (throw "Can't pop empty vector")
        (= cnt 1)
        (empty-vec)
        true
        (let [tail (Map/get pv :tail)
              tail-len (tuple-size tail)]
          (if (> tail-len 1)
            ;; Shrink tail
            (let [new-tail (List/to-tuple (lists/droplast (Tuple/to-list tail)))]
              (CljElixir.PersistentVector/new
                (Map/get pv :meta) (dec cnt) (Map/get pv :shift) (Map/get pv :root) new-tail))
            ;; Pull last leaf from trie
            (let [new-tail (array-for pv (- cnt 2))
                  shift (Map/get pv :shift)
                  new-root (pop-tail cnt shift (Map/get pv :root))
                  new-root (if (nil? new-root) (empty-node) new-root)
                  ;; "Squish": drop a root level that now has one child.
                  squish? (and (> shift 5) (nil? (elem (Map/get new-root :arr) 1)))
                  new-root (if squish? (elem (Map/get new-root :arr) 0) new-root)
                  new-shift (if squish? (- shift 5) shift)]
              (CljElixir.PersistentVector/new
                (Map/get pv :meta) (dec cnt) new-shift new-root new-tail)))))))
  ;; --- Construction ---
  (defn from-list [xs]
    (Enum/reduce xs (empty-vec) (fn [x acc] (CljElixir.PersistentVector/pv-conj acc x))))
  ;; NOTE(review): appends with ++ inside the loop, so this is O(n^2);
  ;; consider prepending and reversing once if it shows up in profiles.
  (defn to-list [pv]
    (let [cnt (Map/get pv :cnt)]
      (if (= cnt 0)
        (list)
        (loop [i 0
               acc (list)]
          (if (< i cnt)
            (recur (inc i) (++ acc (list (pv-nth pv i))))
            acc)))))
  ;; --- Utility ---
  (defn pv-count [pv]
    (Map/get pv :cnt))
  (defn pv-with-meta [pv new-meta]
    (CljElixir.PersistentVector/new
      new-meta (Map/get pv :cnt) (Map/get pv :shift) (Map/get pv :root) (Map/get pv :tail))))
;; ---------------------------------------------------------------------------
;; SubVector — efficient view into an existing PersistentVector
;; ---------------------------------------------------------------------------
;; A SubVector is a [start, end) window over a backing PersistentVector
;; `v` — no copying; indices are translated by `start`.
(defrecord CljElixir.SubVector [meta v start end]
  (defn sv-new
    ([v start] (CljElixir.SubVector/new nil v start (Map/get v :cnt)))
    ([v start end-idx]
     ;; If v is already a SubVector, flatten so we never nest views.
     (if (Kernel/is-struct v CljElixir.SubVector)
       (CljElixir.SubVector/new nil (Map/get v :v) (+ (Map/get v :start) start) (+ (Map/get v :start) end-idx))
       (CljElixir.SubVector/new nil v start end-idx))))
  (defn sv-count [sv] (- (Map/get sv :end) (Map/get sv :start)))
  ;; Index into the window; 3-arity returns not-found instead of throwing.
  (defn sv-nth
    ([sv i]
     (let [actual-i (+ (Map/get sv :start) i)]
       (if (and (>= i 0) (< actual-i (Map/get sv :end)))
         (CljElixir.PersistentVector/pv-nth (Map/get sv :v) actual-i)
         (throw (str "Index " i " out of bounds for subvec of size " (sv-count sv))))))
    ([sv i not-found]
     (let [actual-i (+ (Map/get sv :start) i)]
       (if (and (>= i 0) (< actual-i (Map/get sv :end)))
         (CljElixir.PersistentVector/pv-nth (Map/get sv :v) actual-i)
         not-found))))
  ;; Materializes the window as a list (same ++-append pattern as
  ;; PersistentVector/to-list, so O(n^2) on large windows).
  (defn sv-to-list [sv]
    (let [start (Map/get sv :start)
          end-idx (Map/get sv :end)
          v (Map/get sv :v)]
      (loop [i start acc (list)]
        (if (< i end-idx)
          (recur (inc i) (++ acc (list (CljElixir.PersistentVector/pv-nth v i))))
          acc)))))
+420
View File
@@ -0,0 +1,420 @@
;; CljElixir Core Protocols
;; Defines the fundamental protocols for CljElixir's data abstractions
;; ---- Protocol Definitions ----
;; Key lookup with optional not-found default.
(defprotocol CljElixir.ILookup
  (-lookup [o k] [o k not-found]))
;; Key presence and associative update.
(defprotocol CljElixir.IAssociative
  (-contains-key? [coll k])
  (-assoc [coll k v]))
;; Key removal.
(defprotocol CljElixir.IMap
  (-dissoc [coll k]))
;; Element count.
(defprotocol CljElixir.ICounted
  (-count [coll]))
;; Conversion to a seq (implementations below return nil when empty).
(defprotocol CljElixir.ISeqable
  (-seq [o]))
;; Sequential first/rest access.
(defprotocol CljElixir.ISeq
  (-first [coll])
  (-rest [coll]))
;; Collection append; semantics are type-specific (prepend for lists,
;; merge for maps, append for vectors/tuples).
(defprotocol CljElixir.ICollection
  (-conj [coll o]))
;; Indexed access with optional not-found default.
(defprotocol CljElixir.IIndexed
  (-nth [coll n] [coll n not-found]))
;; Invocation as a function, up to three arguments after the receiver.
(defprotocol CljElixir.IFn
  (-invoke [o] [o a] [o a b] [o a b c]))
;; Metadata read and attach.
(defprotocol CljElixir.IMeta
  (-meta [o]))
(defprotocol CljElixir.IWithMeta
  (-with-meta [o meta]))
;; Stack access.
(defprotocol CljElixir.IStack
  (-peek [coll])
  (-pop [coll]))
;; Map-entry key/value access.
(defprotocol CljElixir.IMapEntry
  (-key [coll])
  (-val [coll]))
;; Reduce over key/value pairs; f receives (acc, key, value).
(defprotocol CljElixir.IKVReduce
  (-kv-reduce [coll f init]))
;; Hashing and value equality.
(defprotocol CljElixir.IHash
  (-hash [o]))
(defprotocol CljElixir.IEquiv
  (-equiv [o other]))
;; Deep conversion between BEAM-native and CljElixir data (Phase 4).
(defprotocol CljElixir.IClojurify
  (-clojurify [o]))
(defprotocol CljElixir.IElixirify
  (-elixirify [o]))
;; ---- Type Extensions ----
;; Map extensions
(extend-type Map
  CljElixir.ILookup
  (-lookup
    ([o k] (Map/get o k))
    ([o k not-found] (Map/get o k not-found)))
  CljElixir.IAssociative
  (-contains-key? [coll k] (Map/has-key? coll k))
  (-assoc [coll k v] (Map/put coll k v))
  CljElixir.IMap
  (-dissoc [coll k] (Map/delete coll k))
  CljElixir.ICounted
  (-count [coll] (map-size coll))
  CljElixir.ISeqable
  ;; A map seqs to its list of {k, v} entry tuples.
  (-seq [o] (Map/to-list o))
  CljElixir.ICollection
  ;; conj on a map merges another map into it.
  (-conj [coll o] (Map/merge coll o))
  CljElixir.IFn
  ;; Maps are callable as lookup functions: (m k) and (m k not-found).
  (-invoke
    ([_o] (throw "map invoke requires at least one argument"))
    ([o k] (Map/get o k))
    ([o k not-found] (Map/get o k not-found))
    ([_o _a _b _c] (throw "map invoke supports at most two arguments")))
  CljElixir.IKVReduce
  ;; f is applied as (acc, key, value) for each entry.
  (-kv-reduce [coll f init]
    (Enum/reduce (Map/to-list coll) init
      (fn [entry acc] (erlang/apply f [acc (elem entry 0) (elem entry 1)]))))
  CljElixir.IHash
  (-hash [o] (erlang/phash2 o))
  CljElixir.IEquiv
  (-equiv [o other] (= o other)))
;; List extensions
(extend-type List
  CljElixir.ISeq
  ;; first/rest on the empty list return nil/[] rather than crashing hd/tl.
  (-first [coll] (if (= coll []) nil (hd coll)))
  (-rest [coll] (if (= coll []) [] (tl coll)))
  CljElixir.ICounted
  (-count [coll] (length coll))
  CljElixir.ISeqable
  (-seq [o] (if (= o []) nil o))
  CljElixir.ICollection
  ;; conj on a list prepends (Clojure list semantics).
  (-conj [coll o] (cons o coll))
  CljElixir.IStack
  ;; Lists are stacks at the head.
  (-peek [coll] (if (= coll []) nil (hd coll)))
  (-pop [coll] (if (= coll []) [] (tl coll)))
  CljElixir.IHash
  (-hash [o] (erlang/phash2 o))
  CljElixir.IEquiv
  (-equiv [o other] (= o other)))
;; Tuple extensions
(extend-type Tuple
  CljElixir.ICounted
  (-count [coll] (tuple-size coll))
  CljElixir.IIndexed
  ;; 2-arity delegates bounds checking to elem; 3-arity only guards the
  ;; upper bound, so a negative n still reaches elem.
  (-nth
    ([coll n] (elem coll n))
    ([coll n not-found]
     (if (< n (tuple-size coll))
       (elem coll n)
       not-found)))
  CljElixir.IHash
  (-hash [o] (erlang/phash2 o))
  CljElixir.IEquiv
  (-equiv [o other] (= o other)))
;; BitString extensions
(extend-type BitString
  CljElixir.ICounted
  ;; Counts bytes, not graphemes — O(1) on binaries.
  (-count [coll] (byte-size coll)))
;; PersistentVector extensions
(extend-type CljElixir.PersistentVector
  CljElixir.ICounted
  (-count [pv] (CljElixir.PersistentVector/pv-count pv))
  CljElixir.IIndexed
  (-nth
    ([pv n] (CljElixir.PersistentVector/pv-nth pv n))
    ([pv n not-found] (CljElixir.PersistentVector/pv-nth pv n not-found)))
  CljElixir.ILookup
  ;; lookup treats integer keys as indices; any other key misses.
  (-lookup
    ([pv k]
     (if (is-integer k)
       (if (and (>= k 0) (< k (CljElixir.PersistentVector/pv-count pv)))
         (CljElixir.PersistentVector/pv-nth pv k)
         nil)
       nil))
    ([pv k not-found]
     (if (is-integer k)
       (if (and (>= k 0) (< k (CljElixir.PersistentVector/pv-count pv)))
         (CljElixir.PersistentVector/pv-nth pv k)
         not-found)
       not-found)))
  CljElixir.IAssociative
  (-contains-key? [pv k]
    (and (is-integer k) (>= k 0) (< k (CljElixir.PersistentVector/pv-count pv))))
  (-assoc [pv k v] (CljElixir.PersistentVector/pv-assoc pv k v))
  CljElixir.ICollection
  ;; conj on a vector appends (unlike lists, which prepend).
  (-conj [pv val] (CljElixir.PersistentVector/pv-conj pv val))
  CljElixir.ISeqable
  (-seq [pv]
    (if (= (CljElixir.PersistentVector/pv-count pv) 0)
      nil
      (CljElixir.PersistentVector/to-list pv)))
  CljElixir.ISeq
  (-first [pv]
    (if (= (CljElixir.PersistentVector/pv-count pv) 0)
      nil
      (CljElixir.PersistentVector/pv-nth pv 0)))
  ;; NOTE(review): -rest materializes the whole vector as a list first.
  (-rest [pv]
    (if (= (CljElixir.PersistentVector/pv-count pv) 0)
      (list)
      (tl (CljElixir.PersistentVector/to-list pv))))
  CljElixir.IStack
  ;; Vectors are stacks at the tail end.
  (-peek [pv]
    (if (= (CljElixir.PersistentVector/pv-count pv) 0)
      nil
      (CljElixir.PersistentVector/pv-nth pv (dec (CljElixir.PersistentVector/pv-count pv)))))
  (-pop [pv] (CljElixir.PersistentVector/pv-pop pv))
  CljElixir.IFn
  ;; Vectors are callable as index lookups: (v i).
  (-invoke
    ([_pv] (throw "vector invoke requires at least one argument"))
    ([pv i] (CljElixir.PersistentVector/pv-nth pv i))
    ([_pv _a _b] (throw "vector invoke supports at most one argument"))
    ([_pv _a _b _c] (throw "vector invoke supports at most one argument")))
  CljElixir.IMeta
  (-meta [pv] (Map/get pv :meta))
  CljElixir.IWithMeta
  (-with-meta [pv m] (CljElixir.PersistentVector/pv-with-meta pv m))
  CljElixir.IKVReduce
  ;; f receives (acc, index, element).
  (-kv-reduce [pv f init]
    (let [cnt (CljElixir.PersistentVector/pv-count pv)]
      (loop [i 0
             acc init]
        (if (< i cnt)
          (recur (inc i) (erlang/apply f [acc i (CljElixir.PersistentVector/pv-nth pv i)]))
          acc))))
  CljElixir.IHash
  ;; Hash of the list form so equal vectors and lists hash alike.
  (-hash [pv] (erlang/phash2 (CljElixir.PersistentVector/to-list pv)))
  CljElixir.IEquiv
  ;; Vectors compare equal to lists and other vectors with the same elements.
  (-equiv [pv other]
    (let [pv-list (CljElixir.PersistentVector/to-list pv)]
      (cond
        (is-list other)
        (= pv-list other)
        (vector? other)
        (= pv-list (CljElixir.PersistentVector/to-list other))
        true
        false))))
;; SubVector extensions
(extend-type CljElixir.SubVector
  CljElixir.ICounted
  (-count [sv] (CljElixir.SubVector/sv-count sv))
  CljElixir.IIndexed
  (-nth
    ([sv n] (CljElixir.SubVector/sv-nth sv n))
    ([sv n not-found] (CljElixir.SubVector/sv-nth sv n not-found)))
  CljElixir.ILookup
  ;; Integer keys are indices into the window; other keys miss.
  (-lookup
    ([sv k]
     (if (is-integer k)
       (if (and (>= k 0) (< k (CljElixir.SubVector/sv-count sv)))
         (CljElixir.SubVector/sv-nth sv k)
         nil)
       nil))
    ([sv k not-found]
     (if (is-integer k)
       (if (and (>= k 0) (< k (CljElixir.SubVector/sv-count sv)))
         (CljElixir.SubVector/sv-nth sv k)
         not-found)
       not-found)))
  CljElixir.ISeqable
  (-seq [sv]
    (if (= (CljElixir.SubVector/sv-count sv) 0)
      nil
      (CljElixir.SubVector/sv-to-list sv)))
  CljElixir.ISeq
  (-first [sv]
    (if (= (CljElixir.SubVector/sv-count sv) 0)
      nil
      (CljElixir.SubVector/sv-nth sv 0)))
  (-rest [sv]
    (if (= (CljElixir.SubVector/sv-count sv) 0)
      (list)
      (tl (CljElixir.SubVector/sv-to-list sv))))
  CljElixir.IStack
  (-peek [sv]
    (if (= (CljElixir.SubVector/sv-count sv) 0)
      nil
      (CljElixir.SubVector/sv-nth sv (dec (CljElixir.SubVector/sv-count sv)))))
  ;; pop shrinks the window's end — the backing vector is untouched.
  (-pop [sv]
    (let [cnt (CljElixir.SubVector/sv-count sv)]
      (if (= cnt 0)
        (throw "Can't pop empty subvec")
        (CljElixir.SubVector/sv-new (Map/get sv :v) (Map/get sv :start) (dec (Map/get sv :end))))))
  CljElixir.IHash
  (-hash [sv] (erlang/phash2 (CljElixir.SubVector/sv-to-list sv)))
  CljElixir.IEquiv
  ;; Subvectors compare by their materialized element list.
  (-equiv [sv other]
    (let [sv-list (CljElixir.SubVector/sv-to-list sv)]
      (cond
        (is-list other)
        (= sv-list other)
        (vector? other)
        (= sv-list (CljElixir.PersistentVector/to-list other))
        true
        false))))
;; Tuple sequence and collection extensions
;; Sequence and collection behavior for tuples (each -rest/-seq copies the
;; tuple into a list).
(extend-type Tuple
  CljElixir.ISeqable
  (-seq [o] (if (= (tuple-size o) 0) nil (Tuple/to-list o)))
  CljElixir.ISeq
  (-first [o] (if (= (tuple-size o) 0) nil (elem o 0)))
  (-rest [o] (if (= (tuple-size o) 0) (list) (tl (Tuple/to-list o))))
  CljElixir.ICollection
  ;; conj on a tuple appends a new last element.
  (-conj [o x] (erlang/append-element o x)))
;; ---- Phase 4: Clojurify/Elixirify ----
;; IClojurify - deep convert BEAM types → CljElixir types
;; Tuples and lists become PersistentVectors; maps walk values; scalars pass through
;; clojurify: tuples and lists deep-convert into PersistentVectors;
;; maps keep their keys and clojurify values; scalars pass through.
(extend-type Tuple
  CljElixir.IClojurify
  (-clojurify [o]
    (CljElixir.PersistentVector/from-list
      (Enum/map (Tuple/to-list o) (fn [x] (CljElixir.IClojurify/clojurify x))))))
(extend-type List
  CljElixir.IClojurify
  (-clojurify [o]
    (CljElixir.PersistentVector/from-list
      (Enum/map o (fn [x] (CljElixir.IClojurify/clojurify x))))))
(extend-type Map
  CljElixir.IClojurify
  ;; Keys are left as-is; only values are converted.
  (-clojurify [o]
    (Map/new (Enum/map o (fn [entry] #el[(elem entry 0) (CljElixir.IClojurify/clojurify (elem entry 1))])))))
(extend-type CljElixir.PersistentVector
  CljElixir.IClojurify
  ;; Already a vector, but elements may still need conversion — rebuild.
  (-clojurify [o]
    (CljElixir.PersistentVector/from-list
      (Enum/map (CljElixir.PersistentVector/to-list o) (fn [x] (CljElixir.IClojurify/clojurify x))))))
;; Scalars are identity under clojurify.
(extend-type Atom
  CljElixir.IClojurify
  (-clojurify [o] o))
(extend-type Integer
  CljElixir.IClojurify
  (-clojurify [o] o))
(extend-type Float
  CljElixir.IClojurify
  (-clojurify [o] o))
(extend-type BitString
  CljElixir.IClojurify
  (-clojurify [o] o))
;; elixirify: PersistentVectors become plain lists; tuples/lists/maps walk
;; their elements; scalars pass through.
(extend-type CljElixir.PersistentVector
  CljElixir.IElixirify
  (-elixirify [o]
    (Enum/map (CljElixir.PersistentVector/to-list o) (fn [x] (CljElixir.IElixirify/elixirify x)))))
(extend-type List
  CljElixir.IElixirify
  (-elixirify [o]
    (Enum/map o (fn [x] (CljElixir.IElixirify/elixirify x)))))
(extend-type Map
  CljElixir.IElixirify
  (-elixirify [o]
    (Map/new (Enum/map o (fn [entry] #el[(elem entry 0) (CljElixir.IElixirify/elixirify (elem entry 1))])))))
(extend-type Tuple
  CljElixir.IElixirify
  ;; Tuples stay tuples, with elixirified elements.
  (-elixirify [o]
    (erlang/list-to-tuple
      (Enum/map (Tuple/to-list o) (fn [x] (CljElixir.IElixirify/elixirify x))))))
;; Scalars are identity under elixirify.
(extend-type Atom
  CljElixir.IElixirify
  (-elixirify [o] o))
(extend-type Integer
  CljElixir.IElixirify
  (-elixirify [o] o))
(extend-type Float
  CljElixir.IElixirify
  (-elixirify [o] o))
(extend-type BitString
  CljElixir.IElixirify
  (-elixirify [o] o))
;; SubVectors convert via their materialized window.
(extend-type CljElixir.SubVector
  CljElixir.IClojurify
  (-clojurify [o]
    (CljElixir.PersistentVector/from-list
      (Enum/map (CljElixir.SubVector/sv-to-list o) (fn [x] (CljElixir.IClojurify/clojurify x))))))
(extend-type CljElixir.SubVector
  CljElixir.IElixirify
  (-elixirify [o]
    (Enum/map (CljElixir.SubVector/sv-to-list o) (fn [x] (CljElixir.IElixirify/elixirify x)))))
+151
View File
@@ -0,0 +1,151 @@
# Tests for the CljElixir.Compiler public API. Several tests accept either
# {:ok, ...} or {:error, diagnostics} so they pass both before and after
# the Reader/Transformer phases are implemented.
defmodule CljElixir.CompilerTest do
  use ExUnit.Case, async: true

  describe "compile_string/2" do
    test "returns {:ok, ast} for valid source" do
      # This test exercises the full pipeline. It requires Reader and Transformer
      # to be implemented. Until then, it verifies the Compiler module compiles
      # and the function heads are correct.
      source = "(+ 1 2)"
      case CljElixir.Compiler.compile_string(source) do
        {:ok, _ast} ->
          :ok
        {:error, diagnostics} ->
          # Expected when Reader/Transformer are not yet implemented
          assert is_list(diagnostics)
      end
    end

    test "returns {:error, diagnostics} for missing file" do
      {:error, diagnostics} = CljElixir.Compiler.compile_file("/nonexistent/path.clje")
      assert is_list(diagnostics)
      assert length(diagnostics) > 0
      [diag | _] = diagnostics
      assert diag.severity == :error
      assert diag.message =~ "could not read file"
    end

    test "passes file option through" do
      source = "(+ 1 2)"
      opts = [file: "test.clje"]
      case CljElixir.Compiler.compile_string(source, opts) do
        {:ok, _ast} -> :ok
        {:error, _diagnostics} -> :ok
      end
    end
  end

  describe "eval_string/2" do
    test "returns {:ok, result, bindings} or {:error, diagnostics}" do
      source = "(+ 1 2)"
      case CljElixir.Compiler.eval_string(source) do
        {:ok, result, bindings} ->
          assert result == 3
          assert is_list(bindings)
        {:error, diagnostics} ->
          # Expected when Reader/Transformer are not yet implemented
          assert is_list(diagnostics)
      end
    end
  end

  describe "compile_to_beam/2" do
    test "returns {:ok, modules} or {:error, diagnostics}" do
      source = ~S"""
      (defmodule TestBeamCompile
        (defn hello [] :world))
      """
      case CljElixir.Compiler.compile_to_beam(source) do
        {:ok, modules} ->
          assert is_list(modules)
          assert Enum.any?(modules, fn {mod, _binary} ->
            mod == TestBeamCompile
          end)
        {:error, diagnostics} ->
          # Expected when Reader/Transformer are not yet implemented
          assert is_list(diagnostics)
      end
    end
  end

  describe "compile_file/2" do
    test "reads file and compiles" do
      # Write a temp file; unique_integer avoids collisions across async runs.
      tmp_dir = System.tmp_dir!()
      path = Path.join(tmp_dir, "test_compile_#{System.unique_integer([:positive])}.clje")
      File.write!(path, "(+ 1 2)")
      try do
        case CljElixir.Compiler.compile_file(path) do
          {:ok, _ast} -> :ok
          {:error, _diagnostics} -> :ok
        end
      after
        File.rm(path)
      end
    end

    test "returns error for nonexistent file" do
      {:error, [diag | _]} = CljElixir.Compiler.compile_file("/does/not/exist.clje")
      assert diag.severity == :error
      assert diag.message =~ "could not read file"
    end
  end

  describe "eval_file/2" do
    test "reads file, compiles, and evaluates" do
      tmp_dir = System.tmp_dir!()
      path = Path.join(tmp_dir, "test_eval_#{System.unique_integer([:positive])}.clje")
      File.write!(path, "(+ 1 2)")
      try do
        case CljElixir.Compiler.eval_file(path) do
          {:ok, 3, _bindings} -> :ok
          {:ok, _result, _bindings} -> :ok
          {:error, _diagnostics} -> :ok
        end
      after
        File.rm(path)
      end
    end
  end

  describe "compile_file_to_beam/2" do
    test "compiles file and writes .beam output" do
      tmp_dir = System.tmp_dir!()
      source_path = Path.join(tmp_dir, "test_beam_#{System.unique_integer([:positive])}.clje")
      output_dir = Path.join(tmp_dir, "beam_output_#{System.unique_integer([:positive])}")
      File.write!(source_path, ~S"""
      (defmodule TestBeamOutput
        (defn greet [] "hi"))
      """)
      try do
        case CljElixir.Compiler.compile_file_to_beam(source_path, output_dir: output_dir) do
          {:ok, modules} ->
            assert is_list(modules)
            # Check .beam files were written
            beam_files = Path.wildcard(Path.join(output_dir, "*.beam"))
            assert length(beam_files) > 0
          {:error, _diagnostics} ->
            # Expected when Reader/Transformer are not yet implemented
            :ok
        end
      after
        File.rm(source_path)
        File.rm_rf(output_dir)
      end
    end
  end
end
+501
View File
@@ -0,0 +1,501 @@
# Unit tests for the Malli schema -> Elixir typespec adapter.
# The helpers below build expected quoted AST fragments (e.g. String.t())
# so assertions can pattern-match Malli's output exactly.
defmodule CljElixir.MalliTest do
  use ExUnit.Case, async: true
  alias CljElixir.Malli

  # Atoms that need quoted syntax in Elixir source
  @arrow :"=>"
  @opt :"?"

  # ── Helper ──────────────────────────────────────────────────────────
  # Quoted AST for `String.t()`.
  defp string_t do
    {{:., [], [{:__aliases__, [alias: false], [:String]}, :t]}, [], []}
  end

  # Quoted AST for `MapSet.t()`.
  defp mapset_t do
    {{:., [], [{:__aliases__, [alias: false], [:MapSet]}, :t]}, [], []}
  end

  # Quoted AST for `CljElixir.PersistentVector.t()`.
  defp pv_t do
    {{:., [], [{:__aliases__, [alias: false], [:CljElixir, :PersistentVector]}, :t]}, [], []}
  end

  # ── Primitive types ─────────────────────────────────────────────────
  describe "primitive types" do
    test "string" do
      assert string_t() == Malli.schema_to_typespec(:string)
    end
    test "int" do
      assert {:integer, [], []} = Malli.schema_to_typespec(:int)
    end
    test "integer" do
      assert {:integer, [], []} = Malli.schema_to_typespec(:integer)
    end
    test "float" do
      assert {:float, [], []} = Malli.schema_to_typespec(:float)
    end
    test "number" do
      assert {:number, [], []} = Malli.schema_to_typespec(:number)
    end
    test "boolean" do
      assert {:boolean, [], []} = Malli.schema_to_typespec(:boolean)
    end
    test "atom" do
      assert {:atom, [], []} = Malli.schema_to_typespec(:atom)
    end
    test "keyword" do
      # Clojure keywords map onto BEAM atoms.
      assert {:atom, [], []} = Malli.schema_to_typespec(:keyword)
    end
    test "any" do
      assert {:any, [], []} = Malli.schema_to_typespec(:any)
    end
    test "nil" do
      assert nil == Malli.schema_to_typespec(:nil)
    end
    test "pid" do
      assert {:pid, [], []} = Malli.schema_to_typespec(:pid)
    end
    test "port" do
      assert {:port, [], []} = Malli.schema_to_typespec(:port)
    end
    test "reference" do
      assert {:reference, [], []} = Malli.schema_to_typespec(:reference)
    end
    test "pos-int" do
      assert {:pos_integer, [], []} = Malli.schema_to_typespec(:"pos-int")
    end
    test "neg-int" do
      assert {:neg_integer, [], []} = Malli.schema_to_typespec(:"neg-int")
    end
    test "nat-int" do
      assert {:non_neg_integer, [], []} = Malli.schema_to_typespec(:"nat-int")
    end
  end

  # ── Compound types ─────────────────────────────────────────────────
  describe "compound types" do
    test "or with two types" do
      ast = Malli.schema_to_typespec([:or, :int, :string])
      expected_string = string_t()
      assert {:|, [], [{:integer, [], []}, ^expected_string]} = ast
    end
    test "or with three types (right-associative)" do
      ast = Malli.schema_to_typespec([:or, :int, :string, :boolean])
      assert {:|, [], [{:integer, [], []}, {:|, [], [_, {:boolean, [], []}]}]} = ast
    end
    test "maybe type" do
      # [:maybe, t] becomes `t | nil`.
      ast = Malli.schema_to_typespec([:maybe, :string])
      expected_string = string_t()
      assert {:|, [], [^expected_string, nil]} = ast
    end
    test "enum type" do
      ast = Malli.schema_to_typespec([:enum, :a, :b, :c])
      assert {:|, [], [:a, {:|, [], [:b, :c]}]} = ast
    end
    test "enum with single value" do
      assert :a = Malli.schema_to_typespec([:enum, :a])
    end
    test "= literal" do
      assert :hello = Malli.schema_to_typespec([:=, :hello])
      assert 42 = Malli.schema_to_typespec([:=, 42])
    end
    test "and with base type and constraint (general)" do
      # Constraints with no typespec equivalent fall back to the base type.
      ast = Malli.schema_to_typespec([:and, :int, [:<, 100]])
      assert {:integer, [], []} = ast
    end
    test "and :int [:> 0] produces pos_integer()" do
      ast = Malli.schema_to_typespec([:and, :int, [:>, 0]])
      assert {:pos_integer, [], []} = ast
    end
    test "and :int [:>= 0] produces non_neg_integer()" do
      ast = Malli.schema_to_typespec([:and, :int, [:>=, 0]])
      assert {:non_neg_integer, [], []} = ast
    end
  end

  # ── Container types ────────────────────────────────────────────────
  describe "container types" do
    test "map with fields" do
      ast = Malli.schema_to_typespec([:map, [:name, :string], [:age, :int]])
      assert {:%{}, [], kv} = ast
      assert Keyword.has_key?(kv, :name)
      assert Keyword.has_key?(kv, :age)
      expected_string = string_t()
      assert ^expected_string = Keyword.get(kv, :name)
      assert {:integer, [], []} = Keyword.get(kv, :age)
    end
    test "map with optional field" do
      ast = Malli.schema_to_typespec([:map, [:name, :string], [:email, {:optional, true}, :string]])
      assert {:%{}, [], kv} = ast
      assert Keyword.has_key?(kv, :name)
      assert Keyword.has_key?(kv, :email)
    end
    test "map-of" do
      # [:map-of, k, v] becomes %{optional(k) => v}.
      ast = Malli.schema_to_typespec([:"map-of", :string, :int])
      assert {:%{}, [], [optional_entry]} = ast
      assert {{:optional, [], [_key_t]}, {:integer, [], []}} = optional_entry
    end
    test "list" do
      ast = Malli.schema_to_typespec([:list, :int])
      assert [{:integer, [], []}] = ast
    end
    test "vector" do
      # Vectors are opaque at the typespec level: CljElixir.PersistentVector.t().
      ast = Malli.schema_to_typespec([:vector, :int])
      assert ^ast = pv_t()
    end
    test "set" do
      ast = Malli.schema_to_typespec([:set, :int])
      assert ^ast = mapset_t()
    end
    test "tuple" do
      ast = Malli.schema_to_typespec([:tuple, :int, :string])
      expected_string = string_t()
      assert {:{}, [], [{:integer, [], []}, ^expected_string]} = ast
    end
    test "tuple with three elements" do
      ast = Malli.schema_to_typespec([:tuple, :int, :string, :boolean])
      assert {:{}, [], [{:integer, [], []}, _, {:boolean, [], []}]} = ast
    end
  end

  # ── Function specs ─────────────────────────────────────────────────
  describe "function specs" do
    test "simple function spec" do
      specs = Malli.spec_ast(:hello, [@arrow, [:cat, :string], :string])
      assert length(specs) == 1
      [{:@, [], [{:spec, [], [spec_body]}]}] = specs
      assert {:"::", [], [{:hello, [], [_arg]}, _ret]} = spec_body
    end
    test "function with two params" do
      specs = Malli.spec_ast(:add, [@arrow, [:cat, :int, :int], :int])
      assert length(specs) == 1
      [{:@, [], [{:spec, [], [{:"::", [], [{:add, [], args}, _ret]}]}]}] = specs
      assert length(args) == 2
    end
    test "function with optional param produces two specs" do
      # One @spec per arity the optional parameter makes possible.
      specs = Malli.spec_ast(:greet, [@arrow, [:cat, :string, [@opt, :string]], :string])
      assert length(specs) == 2
      arities = Enum.map(specs, fn
        {:@, [], [{:spec, [], [{:"::", [], [{:greet, [], args}, _ret]}]}]} ->
          length(args)
      end)
      assert Enum.sort(arities) == [1, 2]
    end
    test "function with multiple optional params" do
      specs = Malli.spec_ast(:f, [@arrow, [:cat, :int, [@opt, :string], [@opt, :boolean]], :any])
      assert length(specs) == 3
      arities = Enum.map(specs, fn
        {:@, [], [{:spec, [], [{:"::", [], [{:f, [], args}, _ret]}]}]} ->
          length(args)
      end)
      assert Enum.sort(arities) == [1, 2, 3]
    end
    test "multi-arity function via :function" do
      specs =
        Malli.spec_ast(:greet, [
          :function,
          [@arrow, [:cat, :string], :string],
          [@arrow, [:cat, :string, :string], :string]
        ])
      assert length(specs) == 2
      arities = Enum.map(specs, fn
        {:@, [], [{:spec, [], [{:"::", [], [{:greet, [], args}, _ret]}]}]} ->
          length(args)
      end)
      assert Enum.sort(arities) == [1, 2]
    end
  end

  # ── Type generation ────────────────────────────────────────────────
  describe "type generation" do
    test "named type from map schema" do
      ast = Malli.type_ast(:user, [:map, [:name, :string], [:age, :int]])
      assert {:@, [], [{:type, [], [{:"::", [], [{:user, [], []}, _map_type]}]}]} = ast
    end
    test "named type from primitive" do
      ast = Malli.type_ast(:name, :string)
      expected_string = string_t()
      assert {:@, [], [{:type, [], [{:"::", [], [{:name, [], []}, ^expected_string]}]}]} = ast
    end
  end

  # ── Schema references ─────────────────────────────────────────────
  describe "schema references" do
    test "known type reference" do
      ast = Malli.schema_to_typespec("User", known_types: %{"User" => :user})
      assert {:user, [], []} = ast
    end
    test "unknown string reference falls back to any()" do
      ast = Malli.schema_to_typespec("Unknown")
      assert {:any, [], []} = ast
    end
  end

  # ── Recursive types ───────────────────────────────────────────────
  describe "recursive types" do
    test "ref produces type call" do
      ast = Malli.schema_to_typespec([:ref, :tree], registry: %{tree: [:or, :int, :nil]})
      assert {:tree, [], []} = ast
    end
    test "schema with registry via type_ast/2 dispatches to type_ast/3" do
      schema = [
        :schema,
        %{registry: %{tree: [:or, :int, [:tuple, [:ref, :tree], [:ref, :tree]]]}},
        [:ref, :tree]
      ]
      types = Malli.type_ast(:tree, schema)
      assert is_list(types)
      assert length(types) >= 1
      [{:@, [], [{:type, [], [{:"::", [], [{:tree, [], []}, _body]}]}]}] = types
    end
    test "schema with registry via type_ast/3 with list of pairs" do
      registry = [{:tree, [:or, :int, [:tuple, [:ref, :tree], [:ref, :tree]]]}]
      schema = [:schema, %{registry: Map.new(registry)}, [:ref, :tree]]
      types = Malli.type_ast(:tree, schema, registry)
      assert is_list(types)
      assert length(types) == 1
    end
  end

  # ── Compilation smoke tests ───────────────────────────────────────
  # Each smoke test compiles the generated AST into a throwaway module to
  # prove the typespec is syntactically valid, then purges the module.
  describe "compilation smoke test" do
    test "generated spec compiles in a module" do
      spec_asts = Malli.spec_ast(:hello, [@arrow, [:cat, :string], :string])
      fun_ast =
        {:def, [], [
          {:hello, [], [{:name, [], nil}]},
          [do: {:name, [], nil}]
        ]}
      module_body = spec_asts ++ [fun_ast]
      block = {:__block__, [], module_body}
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest1]}, [do: block]]}
      assert [{MalliSmokeTest1, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest1)
      :code.delete(MalliSmokeTest1)
    end
    test "generated type compiles in a module" do
      type_ast = Malli.type_ast(:user, [:map, [:name, :string], [:age, :int]])
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest2]}, [do: type_ast]]}
      assert [{MalliSmokeTest2, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest2)
      :code.delete(MalliSmokeTest2)
    end
    test "multi-arity spec compiles" do
      specs =
        Malli.spec_ast(:greet, [
          :function,
          [@arrow, [:cat, :string], :string],
          [@arrow, [:cat, :string, :string], :string]
        ])
      fun1 =
        {:def, [], [
          {:greet, [], [{:name, [], nil}]},
          [do: {:name, [], nil}]
        ]}
      fun2 =
        {:def, [], [
          {:greet, [], [{:greeting, [], nil}, {:name, [], nil}]},
          [do: {:name, [], nil}]
        ]}
      module_body = specs ++ [fun1, fun2]
      block = {:__block__, [], module_body}
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest3]}, [do: block]]}
      assert [{MalliSmokeTest3, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest3)
      :code.delete(MalliSmokeTest3)
    end
    test "map-of type compiles" do
      type_ast = Malli.type_ast(:counts, [:"map-of", :string, :int])
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest4]}, [do: type_ast]]}
      assert [{MalliSmokeTest4, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest4)
      :code.delete(MalliSmokeTest4)
    end
    test "tuple type compiles" do
      type_ast = Malli.type_ast(:point, [:tuple, :int, :int])
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest5]}, [do: type_ast]]}
      assert [{MalliSmokeTest5, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest5)
      :code.delete(MalliSmokeTest5)
    end
    test "list type compiles" do
      type_ast = Malli.type_ast(:names, [:list, :string])
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest6]}, [do: type_ast]]}
      assert [{MalliSmokeTest6, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest6)
      :code.delete(MalliSmokeTest6)
    end
    test "or type compiles" do
      type_ast = Malli.type_ast(:string_or_int, [:or, :string, :int])
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest7]}, [do: type_ast]]}
      assert [{MalliSmokeTest7, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest7)
      :code.delete(MalliSmokeTest7)
    end
    test "maybe type compiles" do
      type_ast = Malli.type_ast(:opt_string, [:maybe, :string])
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest8]}, [do: type_ast]]}
      assert [{MalliSmokeTest8, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest8)
      :code.delete(MalliSmokeTest8)
    end
    test "recursive type compiles" do
      schema = [
        :schema,
        %{registry: %{tree: [:or, :int, [:tuple, [:ref, :tree], [:ref, :tree]]]}},
        [:ref, :tree]
      ]
      types = Malli.type_ast(:tree, schema)
      block = {:__block__, [], types}
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest9]}, [do: block]]}
      assert [{MalliSmokeTest9, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest9)
      :code.delete(MalliSmokeTest9)
    end
    test "optional params spec compiles" do
      specs =
        Malli.spec_ast(:greet, [@arrow, [:cat, :string, [@opt, :string]], :string])
      fun1 =
        {:def, [], [
          {:greet, [], [{:name, [], nil}]},
          [do: {:name, [], nil}]
        ]}
      fun2 =
        {:def, [], [
          {:greet, [], [{:name, [], nil}, {:greeting, [], nil}]},
          [do: {:name, [], nil}]
        ]}
      module_body = specs ++ [fun1, fun2]
      block = {:__block__, [], module_body}
      module_ast =
        {:defmodule, [context: Elixir],
         [{:__aliases__, [alias: false], [:MalliSmokeTest10]}, [do: block]]}
      assert [{MalliSmokeTest10, _binary}] = Code.compile_quoted(module_ast)
    after
      :code.purge(MalliSmokeTest10)
      :code.delete(MalliSmokeTest10)
    end
  end
end
+417
View File
@@ -0,0 +1,417 @@
defmodule CljElixir.Phase2Test do
  @moduledoc """
  Phase 2 integration tests: core protocol dispatch (ILookup, IAssociative,
  IMap, ICounted, ISeq, ISeqable, ICollection, IIndexed) and the sequence,
  map, and compound builtins, exercised end-to-end through the compiler's
  `eval_string/2`.

  `async: false` because compilation/eval shares global compiler state.
  """

  use ExUnit.Case, async: false

  # Helper to compile and evaluate CljElixir code.
  # Uses vector_as_list: true until PersistentVector is implemented (Phase 3 WS-3)
  defp eval!(source) do
    case CljElixir.Compiler.eval_string(source, vector_as_list: true) do
      {:ok, result, _bindings} -> result
      {:error, errors} -> raise "CljElixir eval error: #{inspect(errors)}"
    end
  end

  # Protocols and core modules are compiled by the Mix compiler plugin
  # (compilers: [..., :clj_elixir] in mix.exs). No setup needed.

  # ==========================================================================
  # ILookup - get
  # ==========================================================================
  describe "get (ILookup)" do
    test "get from map with existing key" do
      assert eval!("(get {:a 1 :b 2} :a)") == 1
    end

    test "get from map with missing key returns nil" do
      assert eval!("(get {:a 1} :b)") == nil
    end

    test "get from map with missing key and default" do
      assert eval!("(get {:a 1} :b 42)") == 42
    end

    test "get from map with existing key ignores default" do
      assert eval!("(get {:a 1} :a 42)") == 1
    end
  end

  # ==========================================================================
  # IAssociative - assoc, contains?
  # ==========================================================================
  describe "assoc (IAssociative)" do
    test "assoc adds new key to map" do
      assert eval!("(assoc {:a 1} :b 2)") == %{a: 1, b: 2}
    end

    test "assoc updates existing key" do
      assert eval!("(assoc {:a 1} :a 2)") == %{a: 2}
    end

    test "assoc on empty map" do
      assert eval!("(assoc {} :a 1)") == %{a: 1}
    end
  end

  describe "contains? (IAssociative)" do
    test "contains? returns true for existing key" do
      assert eval!("(contains? {:a 1 :b 2} :a)") == true
    end

    test "contains? returns false for missing key" do
      assert eval!("(contains? {:a 1} :c)") == false
    end
  end

  # ==========================================================================
  # IMap - dissoc
  # ==========================================================================
  describe "dissoc (IMap)" do
    test "dissoc removes key from map" do
      assert eval!("(dissoc {:a 1 :b 2} :a)") == %{b: 2}
    end

    test "dissoc with missing key returns same map" do
      assert eval!("(dissoc {:a 1} :b)") == %{a: 1}
    end
  end

  # ==========================================================================
  # ICounted - count
  # ==========================================================================
  describe "count (ICounted)" do
    test "count of map" do
      assert eval!("(count {:a 1 :b 2 :c 3})") == 3
    end

    test "count of list" do
      assert eval!("(count (list 1 2 3))") == 3
    end

    test "count of empty map" do
      assert eval!("(count {})") == 0
    end

    # #el[...] is the CljElixir literal for a native Erlang tuple.
    test "count of tuple" do
      assert eval!("(count #el[1 2 3])") == 3
    end

    test "count of string" do
      assert eval!("(count \"hello\")") == 5
    end
  end

  # ==========================================================================
  # ISeq - first, rest
  # ==========================================================================
  describe "first/rest (ISeq)" do
    test "first of list" do
      assert eval!("(first (list 1 2 3))") == 1
    end

    test "rest of list" do
      assert eval!("(rest (list 1 2 3))") == [2, 3]
    end

    test "first of empty list" do
      assert eval!("(first (list))") == nil
    end

    test "rest of empty list" do
      assert eval!("(rest (list))") == []
    end
  end

  # ==========================================================================
  # ISeqable - seq
  # ==========================================================================
  describe "seq (ISeqable)" do
    test "seq of non-empty list returns the list" do
      assert eval!("(seq (list 1 2 3))") == [1, 2, 3]
    end

    # Clojure semantics: (seq empty-coll) => nil.
    test "seq of empty list returns nil" do
      assert eval!("(seq (list))") == nil
    end

    test "seq of map returns key-value pairs" do
      result = eval!("(seq {:a 1})")
      assert is_list(result)
      assert length(result) == 1
    end
  end

  # ==========================================================================
  # ICollection - conj
  # ==========================================================================
  describe "conj (ICollection)" do
    test "conj onto list prepends" do
      assert eval!("(conj (list 2 3) 1)") == [1, 2, 3]
    end

    test "conj onto map merges tuple entry" do
      result = eval!("(conj {:a 1} {:b 2})")
      assert result == %{a: 1, b: 2}
    end
  end

  # ==========================================================================
  # IIndexed - nth
  # ==========================================================================
  describe "nth (IIndexed)" do
    test "nth from tuple" do
      assert eval!("(nth #el[10 20 30] 1)") == 20
    end

    test "nth with default" do
      assert eval!("(nth #el[10 20] 5 :not-found)") == :"not-found"
    end
  end

  # ==========================================================================
  # IStack - peek, pop (via protocol on List)
  # ==========================================================================
  # Note: peek and pop are not in the builtin dispatch yet, they go through
  # the protocol directly - skip for now unless dispatch was added

  # ==========================================================================
  # Sequence wrapper functions
  # ==========================================================================
  describe "map (sequence function)" do
    test "map over list" do
      assert eval!("(map (fn [x] (inc x)) (list 1 2 3))") == [2, 3, 4]
    end
  end

  describe "filter" do
    test "filter list" do
      assert eval!("(filter (fn [x] (> x 2)) (list 1 2 3 4))") == [3, 4]
    end
  end

  describe "reduce" do
    test "reduce with initial value" do
      assert eval!("(reduce (fn [a b] (+ a b)) 0 (list 1 2 3))") == 6
    end

    test "reduce without initial value" do
      assert eval!("(reduce (fn [a b] (+ a b)) (list 1 2 3))") == 6
    end
  end

  describe "concat" do
    test "concat two lists" do
      assert eval!("(concat (list 1 2) (list 3 4))") == [1, 2, 3, 4]
    end
  end

  describe "take and drop" do
    test "take from list" do
      assert eval!("(take 2 (list 1 2 3 4))") == [1, 2]
    end

    test "drop from list" do
      assert eval!("(drop 2 (list 1 2 3 4))") == [3, 4]
    end
  end

  describe "sort" do
    test "sort list" do
      assert eval!("(sort (list 3 1 2))") == [1, 2, 3]
    end
  end

  describe "distinct" do
    test "distinct removes duplicates" do
      assert eval!("(distinct (list 1 2 1 3 2))") == [1, 2, 3]
    end
  end

  describe "frequencies" do
    test "frequencies counts occurrences" do
      assert eval!("(frequencies (list :a :b :a :c :b :a))") == %{a: 3, b: 2, c: 1}
    end
  end

  describe "partition" do
    test "partition into chunks" do
      assert eval!("(partition 2 (list 1 2 3 4))") == [[1, 2], [3, 4]]
    end
  end

  describe "mapcat" do
    test "mapcat flattens results" do
      assert eval!("(mapcat (fn [x] (list x x)) (list 1 2 3))") == [1, 1, 2, 2, 3, 3]
    end
  end

  # ==========================================================================
  # Map-specific functions
  # ==========================================================================
  describe "keys" do
    test "keys of map" do
      # Map key order is unspecified, so sort before asserting.
      result = eval!("(keys {:a 1 :b 2})")
      assert Enum.sort(result) == [:a, :b]
    end
  end

  describe "vals" do
    test "vals of map" do
      result = eval!("(vals {:a 1 :b 2})")
      assert Enum.sort(result) == [1, 2]
    end
  end

  describe "merge" do
    test "merge two maps" do
      assert eval!("(merge {:a 1} {:b 2})") == %{a: 1, b: 2}
    end

    test "merge with overwrite" do
      assert eval!("(merge {:a 1} {:a 2})") == %{a: 2}
    end
  end

  describe "select-keys" do
    test "select-keys from map" do
      assert eval!("(select-keys {:a 1 :b 2 :c 3} (list :a :c))") == %{a: 1, c: 3}
    end
  end

  describe "into" do
    test "into map from list of tuples" do
      assert eval!("(into {} (list #el[:a 1] #el[:b 2]))") == %{a: 1, b: 2}
    end
  end

  # ==========================================================================
  # update
  # ==========================================================================
  describe "update" do
    test "update map value with function" do
      assert eval!("(update {:a 1} :a (fn [x] (inc x)))") == %{a: 2}
    end
  end

  # ==========================================================================
  # empty?
  # ==========================================================================
  describe "empty?" do
    test "empty? on empty map" do
      assert eval!("(empty? {})") == true
    end

    test "empty? on non-empty map" do
      assert eval!("(empty? {:a 1})") == false
    end
  end

  # ==========================================================================
  # Keyword-as-function through ILookup
  # ==========================================================================
  describe "keyword-as-function" do
    test "keyword as function on map" do
      assert eval!("(:name {:name \"Ada\"})") == "Ada"
    end

    test "keyword with default value" do
      assert eval!("(:age {:name \"Ada\"} 25)") == 25
    end
  end

  # ==========================================================================
  # Compound functions (get-in, assoc-in, update-in)
  # ==========================================================================
  describe "get-in" do
    test "get-in nested map" do
      assert eval!("(get-in {:a {:b {:c 42}}} (list :a :b :c))") == 42
    end

    test "get-in with missing key" do
      assert eval!("(get-in {:a {:b 1}} (list :a :c))") == nil
    end
  end

  describe "assoc-in" do
    test "assoc-in nested map" do
      assert eval!("(assoc-in {:a {:b 1}} (list :a :b) 2)") == %{a: %{b: 2}}
    end
  end

  describe "update-in" do
    test "update-in nested map" do
      assert eval!("(update-in {:a {:b 1}} (list :a :b) (fn [x] (inc x)))") == %{a: %{b: 2}}
    end
  end

  # ==========================================================================
  # reduce-kv
  # ==========================================================================
  describe "reduce-kv" do
    test "reduce-kv over map" do
      # Reduce a map collecting keys into a list
      result = eval!("(reduce-kv (fn [acc _k v] (+ acc v)) 0 {:a 1 :b 2 :c 3})")
      assert result == 6
    end
  end

  # ==========================================================================
  # FFI ? and ! preservation
  # ==========================================================================
  describe "FFI name munging fix" do
    # Trailing ? / ! in FFI calls must survive name munging (Map/has-key?
    # -> Map.has_key?/2) rather than being stripped.
    test "Map/has-key? works correctly" do
      assert eval!("(Map/has-key? {:a 1} :a)") == true
    end

    test "Map/has-key? returns false for missing key" do
      assert eval!("(Map/has-key? {:a 1} :b)") == false
    end
  end

  # ==========================================================================
  # End-to-end integration
  # ==========================================================================
  describe "end-to-end integration" do
    test "realistic data transformation pipeline" do
      source = """
      (let [data (list {:name "Alice" :age 30}
                       {:name "Bob" :age 25}
                       {:name "Carol" :age 35})]
        (map (fn [p] (:name p)) (filter (fn [p] (> (get p :age) 28)) data)))
      """

      assert eval!(source) == ["Alice", "Carol"]
    end

    test "nested map operations" do
      source = """
      (let [m {:a 1 :b 2 :c 3}]
        (dissoc (assoc m :d 4) :b))
      """

      assert eval!(source) == %{a: 1, c: 3, d: 4}
    end

    test "count and empty? together" do
      source = """
      (let [m {:a 1}]
        (list (count m) (empty? m) (empty? {})))
      """

      assert eval!(source) == [1, false, true]
    end
  end
end
+452
View File
@@ -0,0 +1,452 @@
defmodule CljElixir.Phase3Test do
  @moduledoc """
  Phase 3 tests: PersistentVector (bit-partitioned trie) and SubVector.

  Covers vector literals, the vec/vector builtins, protocol dispatch
  (indexed access, mutation, sequence, stack), trie boundary conditions
  (32/33/1025 elements), cross-type equality, and the Elixir Enumerable /
  Collectable protocol implementations.
  """

  use ExUnit.Case, async: false

  # Evaluate CljElixir code with PersistentVector enabled (no vector_as_list flag)
  defp eval!(source) do
    case CljElixir.Compiler.eval_string(source) do
      {:ok, result, _bindings} -> result
      {:error, errors} -> raise "CljElixir eval error: #{inspect(errors)}"
    end
  end

  # ==========================================================================
  # Vector literal construction
  # ==========================================================================
  describe "vector literal construction" do
    test "vector literal produces PersistentVector" do
      result = eval!("[1 2 3]")
      assert result.__struct__ == CljElixir.PersistentVector
    end

    test "empty vector literal produces PersistentVector" do
      result = eval!("[]")
      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.pv_count(result) == 0
    end

    test "single element vector" do
      result = eval!("[42]")
      assert CljElixir.PersistentVector.pv_count(result) == 1
      assert CljElixir.PersistentVector.pv_nth(result, 0) == 42
    end

    test "nested vectors" do
      result = eval!("[[1 2] [3 4]]")
      assert CljElixir.PersistentVector.pv_count(result) == 2
      inner = CljElixir.PersistentVector.pv_nth(result, 0)
      assert inner.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.pv_nth(inner, 0) == 1
    end

    test "vector with mixed types" do
      result = eval!("[1 :two \"three\" nil true]")
      assert CljElixir.PersistentVector.pv_count(result) == 5
      assert CljElixir.PersistentVector.pv_nth(result, 0) == 1
      assert CljElixir.PersistentVector.pv_nth(result, 1) == :two
      assert CljElixir.PersistentVector.pv_nth(result, 2) == "three"
      assert CljElixir.PersistentVector.pv_nth(result, 3) == nil
      assert CljElixir.PersistentVector.pv_nth(result, 4) == true
    end
  end

  # ==========================================================================
  # vec and vector builtins
  # ==========================================================================
  describe "vec and vector builtins" do
    test "vec converts list to PersistentVector" do
      result = eval!("(vec (list 1 2 3))")
      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.pv_count(result) == 3
    end

    test "vector creates PersistentVector from args" do
      result = eval!("(vector 4 5 6)")
      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(result) == [4, 5, 6]
    end

    test "vector with no args creates empty vector" do
      result = eval!("(vector)")
      assert CljElixir.PersistentVector.pv_count(result) == 0
    end

    test "vector? returns true for PersistentVector" do
      assert eval!("(vector? [1 2 3])") == true
    end

    test "vector? returns false for list" do
      assert eval!("(vector? (list 1 2 3))") == false
    end

    test "vector? returns false for map" do
      assert eval!("(vector? {:a 1})") == false
    end
  end

  # ==========================================================================
  # Protocol dispatch — indexed access
  # ==========================================================================
  describe "indexed access" do
    test "nth on vector" do
      assert eval!("(nth [10 20 30] 0)") == 10
      assert eval!("(nth [10 20 30] 1)") == 20
      assert eval!("(nth [10 20 30] 2)") == 30
    end

    test "nth with not-found" do
      assert eval!("(nth [10 20 30] 5 :missing)") == :missing
    end

    test "get on vector" do
      assert eval!("(get [10 20 30] 1)") == 20
    end

    test "get with not-found" do
      assert eval!("(get [10 20 30] 5 :missing)") == :missing
    end

    test "get with non-integer key returns nil" do
      assert eval!("(get [10 20 30] :foo)") == nil
    end
  end

  # ==========================================================================
  # Protocol dispatch — mutation operations
  # ==========================================================================
  describe "mutation operations" do
    # conj appends at the tail for vectors (vs. prepend for lists).
    test "conj appends to vector" do
      result = eval!("(conj [1 2] 3)")
      assert CljElixir.PersistentVector.to_list(result) == [1, 2, 3]
    end

    test "assoc updates index" do
      result = eval!("(assoc [1 2 3] 1 :x)")
      assert CljElixir.PersistentVector.to_list(result) == [1, :x, 3]
    end

    # Clojure semantics: assoc at index == count appends.
    test "assoc at end appends" do
      result = eval!("(assoc [1 2] 2 3)")
      assert CljElixir.PersistentVector.to_list(result) == [1, 2, 3]
    end

    test "contains? with valid index" do
      assert eval!("(contains? [1 2 3] 0)") == true
      assert eval!("(contains? [1 2 3] 2)") == true
    end

    test "contains? with invalid index" do
      assert eval!("(contains? [1 2 3] 3)") == false
      assert eval!("(contains? [1 2 3] -1)") == false
    end
  end

  # ==========================================================================
  # Protocol dispatch — sequence operations
  # ==========================================================================
  describe "sequence operations" do
    test "count on vector" do
      assert eval!("(count [1 2 3])") == 3
      assert eval!("(count [])") == 0
    end

    test "first on vector" do
      assert eval!("(first [10 20 30])") == 10
    end

    test "first on empty vector" do
      assert eval!("(first [])") == nil
    end

    test "seq on non-empty vector returns list" do
      result = eval!("(seq [1 2 3])")
      assert is_list(result)
      assert result == [1, 2, 3]
    end

    test "seq on empty vector returns nil" do
      assert eval!("(seq [])") == nil
    end

    test "empty? on vector" do
      assert eval!("(empty? [])") == true
      assert eval!("(empty? [1])") == false
    end
  end

  # ==========================================================================
  # Stack operations (peek/pop)
  # ==========================================================================
  describe "stack operations" do
    # For vectors, peek/pop operate on the LAST element (unlike lists).
    test "peek returns last element" do
      assert eval!("(peek [10 20 30])") == 30
    end

    test "pop removes last element" do
      result = eval!("(pop [1 2 3])")
      assert CljElixir.PersistentVector.to_list(result) == [1, 2]
    end

    test "pop single element returns empty" do
      result = eval!("(pop [42])")
      assert CljElixir.PersistentVector.pv_count(result) == 0
    end
  end

  # ==========================================================================
  # Vector as function (IFn)
  # ==========================================================================
  describe "vector as function" do
    # TODO: Vector-as-function requires transformer support for struct invocation.
    # The IFn protocol is implemented but Elixir doesn't auto-dispatch when a
    # struct is in call position. Needs a transformer change to detect and wrap
    # non-function call heads with IFn dispatch.
  end

  # ==========================================================================
  # Pattern matching (unchanged — vectors match tuples in patterns)
  # ==========================================================================
  describe "pattern matching" do
    test "vector in case pattern matches tuple" do
      assert eval!("(case #el[:ok 42] [:ok x] x)") == 42
    end

    test "vector in let pattern matches tuple" do
      assert eval!("(let [[:ok x] #el[:ok 99]] x)") == 99
    end

    test "nested vector patterns" do
      result = eval!("""
      (case #el[:ok #el[:inner 5]]
        [:ok [:inner n]] n
        _ nil)
      """)

      assert result == 5
    end
  end

  # ==========================================================================
  # Metadata
  # ==========================================================================
  describe "metadata" do
    test "empty vector has nil meta" do
      result = eval!("[]")
      assert Map.get(result, :meta) == nil
    end
  end

  # ==========================================================================
  # Boundary conditions (trie level transitions)
  # ==========================================================================
  # The trie uses 32-wide nodes with a tail buffer, so 32 -> 33 elements
  # forces the first trie node and ~1024 -> 1025 forces a second trie level.
  describe "boundary conditions" do
    test "32 element vector (full tail, no trie)" do
      v = CljElixir.PersistentVector.from_list(Enum.to_list(1..32))
      assert CljElixir.PersistentVector.pv_count(v) == 32
      assert CljElixir.PersistentVector.pv_nth(v, 0) == 1
      assert CljElixir.PersistentVector.pv_nth(v, 31) == 32
    end

    test "33 element vector (trie overflow)" do
      v = CljElixir.PersistentVector.from_list(Enum.to_list(1..33))
      assert CljElixir.PersistentVector.pv_count(v) == 33
      assert CljElixir.PersistentVector.pv_nth(v, 0) == 1
      assert CljElixir.PersistentVector.pv_nth(v, 32) == 33
    end

    test "1025 element vector (multi-level trie)" do
      v = CljElixir.PersistentVector.from_list(Enum.to_list(1..1025))
      assert CljElixir.PersistentVector.pv_count(v) == 1025
      assert CljElixir.PersistentVector.pv_nth(v, 0) == 1
      assert CljElixir.PersistentVector.pv_nth(v, 1024) == 1025
    end

    test "conj across 32-element boundary" do
      v32 = CljElixir.PersistentVector.from_list(Enum.to_list(1..32))
      v33 = CljElixir.PersistentVector.pv_conj(v32, 33)
      assert CljElixir.PersistentVector.pv_count(v33) == 33
      assert CljElixir.PersistentVector.pv_nth(v33, 32) == 33
      # Original unchanged (structural sharing)
      assert CljElixir.PersistentVector.pv_count(v32) == 32
    end

    test "pop across 33-to-32 boundary" do
      v33 = CljElixir.PersistentVector.from_list(Enum.to_list(1..33))
      v32 = CljElixir.PersistentVector.pv_pop(v33)
      assert CljElixir.PersistentVector.pv_count(v32) == 32
      assert CljElixir.PersistentVector.pv_nth(v32, 31) == 32
    end

    test "assoc in trie (not tail)" do
      v33 = CljElixir.PersistentVector.from_list(Enum.to_list(1..33))
      v33b = CljElixir.PersistentVector.pv_assoc(v33, 0, :first)
      assert CljElixir.PersistentVector.pv_nth(v33b, 0) == :first
      assert CljElixir.PersistentVector.pv_nth(v33b, 1) == 2
      # Original unchanged
      assert CljElixir.PersistentVector.pv_nth(v33, 0) == 1
    end
  end

  # ==========================================================================
  # SubVector
  # ==========================================================================
  # SubVector is an O(1) window [start, end) over a backing PersistentVector.
  describe "subvec" do
    test "subvec creates view into vector" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([1, 2, 3, 4, 5]), 1, 4)
      assert CljElixir.SubVector.sv_count(sv) == 3
      assert CljElixir.SubVector.sv_nth(sv, 0) == 2
      assert CljElixir.SubVector.sv_nth(sv, 1) == 3
      assert CljElixir.SubVector.sv_nth(sv, 2) == 4
    end

    test "subvec to_list" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([1, 2, 3, 4, 5]), 1, 4)
      assert CljElixir.SubVector.sv_to_list(sv) == [2, 3, 4]
    end

    test "subvec 2-arity (start to end)" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([10, 20, 30]), 1)
      assert CljElixir.SubVector.sv_count(sv) == 2
      assert CljElixir.SubVector.sv_to_list(sv) == [20, 30]
    end

    test "subvec nth with not-found" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([1, 2, 3]), 0, 2)
      assert CljElixir.SubVector.sv_nth(sv, 5, :missing) == :missing
    end
  end

  # ==========================================================================
  # Cross-type equality
  # ==========================================================================
  # Clojure-style sequential equality: a vector equals a list with the same
  # elements in the same order.
  describe "cross-type equality" do
    test "vector equals list with same elements" do
      assert eval!("(= [1 2 3] (list 1 2 3))") == true
    end

    test "vector not equal to list with different elements" do
      assert eval!("(= [1 2 3] (list 1 2 4))") == false
    end

    test "two vectors with same elements are equal" do
      assert eval!("(= [1 2 3] [1 2 3])") == true
    end

    test "two vectors with different elements are not equal" do
      assert eval!("(= [1 2] [1 2 3])") == false
    end

    test "not= works with cross-type" do
      assert eval!("(not= [1 2 3] (list 1 2 3))") == false
      assert eval!("(not= [1 2 3] (list 4 5 6))") == true
    end

    test "scalar equality still works" do
      assert eval!("(= 1 1)") == true
      assert eval!("(= 1 2)") == false
      assert eval!("(= :a :a)") == true
    end
  end

  # ==========================================================================
  # Enumerable/Collectable protocols
  # ==========================================================================
  describe "Enumerable and Collectable" do
    test "Enum.map over PersistentVector" do
      pv = CljElixir.PersistentVector.from_list([1, 2, 3])
      result = Enum.map(pv, &(&1 * 2))
      assert result == [2, 4, 6]
    end

    test "Enum.filter over PersistentVector" do
      pv = CljElixir.PersistentVector.from_list([1, 2, 3, 4, 5])
      result = Enum.filter(pv, &(rem(&1, 2) == 0))
      assert result == [2, 4]
    end

    test "Enum.count on PersistentVector" do
      pv = CljElixir.PersistentVector.from_list([1, 2, 3])
      assert Enum.count(pv) == 3
    end

    test "Enum.into PersistentVector" do
      pv = Enum.into([1, 2, 3], CljElixir.PersistentVector.from_list([]))
      assert pv.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(pv) == [1, 2, 3]
    end

    test "Enum.slice on PersistentVector" do
      pv = CljElixir.PersistentVector.from_list([10, 20, 30, 40, 50])
      assert Enum.slice(pv, 1, 3) == [20, 30, 40]
    end
  end

  # ==========================================================================
  # SubVector protocol dispatch
  # ==========================================================================
  describe "SubVector protocols" do
    test "count via protocol" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([1, 2, 3, 4, 5]), 1, 4)
      assert CljElixir.ICounted.count(sv) == 3
    end

    test "nth via protocol" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([10, 20, 30, 40]), 1, 3)
      assert CljElixir.IIndexed.nth(sv, 0) == 20
      assert CljElixir.IIndexed.nth(sv, 1) == 30
    end

    test "lookup via protocol" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([10, 20, 30]), 0, 2)
      assert CljElixir.ILookup.lookup(sv, 0) == 10
      assert CljElixir.ILookup.lookup(sv, 5) == nil
      assert CljElixir.ILookup.lookup(sv, 5, :missing) == :missing
    end

    test "seq via protocol" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([1, 2, 3, 4]), 1, 3)
      assert CljElixir.ISeqable.seq(sv) == [2, 3]
    end

    test "first/rest via protocol" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([10, 20, 30]), 0, 3)
      assert CljElixir.ISeq.first(sv) == 10
      assert CljElixir.ISeq.rest(sv) == [20, 30]
    end

    test "peek/pop via protocol" do
      sv = CljElixir.SubVector.sv_new(CljElixir.PersistentVector.from_list([1, 2, 3, 4, 5]), 1, 4)
      assert CljElixir.IStack.peek(sv) == 4
      popped = CljElixir.IStack.pop(sv)
      assert CljElixir.SubVector.sv_count(popped) == 2
    end
  end

  # ==========================================================================
  # Existing Phase 2 tests still pass (backward compatibility)
  # ==========================================================================
  describe "backward compatibility" do
    test "maps still work with protocols" do
      assert eval!("(get {:a 1 :b 2} :a)") == 1
      assert eval!("(count {:a 1 :b 2})") == 2
    end

    test "lists still work with protocols" do
      assert eval!("(first (list 10 20 30))") == 10
      assert eval!("(count (list 1 2 3))") == 3
    end
  end
end
+392
View File
@@ -0,0 +1,392 @@
defmodule CljElixir.Phase4Test do
  @moduledoc """
  Phase 4 tests: Elixir interop tools.

  Covers the `tuple` builtin, the `clojurify` (Elixir terms -> Clojure
  collections) and `elixirify` (Clojure collections -> Elixir terms) deep
  converters, round-tripping, protocol extensibility via defrecord, and
  tuple sequence/collection operations.
  """

  use ExUnit.Case, async: false

  # Evaluate CljElixir code with PersistentVector enabled
  defp eval!(source) do
    case CljElixir.Compiler.eval_string(source) do
      {:ok, result, _bindings} -> result
      {:error, errors} -> raise "CljElixir eval error: #{inspect(errors)}"
    end
  end

  # ==========================================================================
  # tuple function
  # ==========================================================================
  describe "tuple function" do
    test "empty tuple" do
      result = eval!("(tuple)")
      assert result == {}
    end

    test "single element tuple" do
      result = eval!("(tuple :ok)")
      assert result == {:ok}
    end

    test "two element tuple" do
      result = eval!("(tuple :ok \"data\")")
      assert result == {:ok, "data"}
    end

    test "three element tuple" do
      result = eval!("(tuple 1 2 3)")
      assert result == {1, 2, 3}
    end

    test "tuple with mixed types" do
      result = eval!("(tuple :error 404 \"not found\")")
      assert result == {:error, 404, "not found"}
    end

    test "tuple with nested tuple" do
      result = eval!("(tuple :ok (tuple 1 2))")
      assert result == {:ok, {1, 2}}
    end

    test "tuple-size on constructed tuple" do
      result = eval!("(tuple-size (tuple :a :b :c))")
      assert result == 3
    end

    test "elem on constructed tuple" do
      result = eval!("(elem (tuple :a :b :c) 1)")
      assert result == :b
    end

    test "tuple in let binding" do
      result = eval!("""
      (let [t (tuple :ok 42)]
        (elem t 1))
      """)

      assert result == 42
    end

    # Arguments must be evaluated before tuple construction.
    test "tuple with expressions as arguments" do
      result = eval!("(tuple (+ 1 2) (* 3 4))")
      assert result == {3, 12}
    end
  end

  # ==========================================================================
  # clojurify
  # ==========================================================================
  describe "clojurify" do
    test "tuple to vector" do
      result = eval!("(clojurify #el[:ok \"data\"])")
      assert result.__struct__ == CljElixir.PersistentVector
      list = CljElixir.PersistentVector.to_list(result)
      assert list == [:ok, "data"]
    end

    test "list to vector" do
      result = eval!("(clojurify '(1 2 3))")
      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(result) == [1, 2, 3]
    end

    # Conversion must walk nested structures, not just the top level.
    test "nested tuple deep conversion" do
      result = eval!("(clojurify #el[:ok #el[:nested \"data\"]])")
      assert result.__struct__ == CljElixir.PersistentVector
      list = CljElixir.PersistentVector.to_list(result)
      assert hd(list) == :ok
      inner = hd(tl(list))
      assert inner.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(inner) == [:nested, "data"]
    end

    test "map values walked" do
      result = eval!("(clojurify {:a #el[1 2]})")
      assert is_map(result)
      inner = Map.get(result, :a)
      assert inner.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(inner) == [1, 2]
    end

    test "vector idempotent" do
      result = eval!("(clojurify [1 2 3])")
      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(result) == [1, 2, 3]
    end

    test "scalar passthrough - integer" do
      assert eval!("(clojurify 42)") == 42
    end

    test "scalar passthrough - string" do
      assert eval!("(clojurify \"hello\")") == "hello"
    end

    test "scalar passthrough - atom" do
      assert eval!("(clojurify :foo)") == :foo
    end

    test "scalar passthrough - nil" do
      assert eval!("(clojurify nil)") == nil
    end
  end

  # ==========================================================================
  # elixirify
  # ==========================================================================
  describe "elixirify" do
    test "vector to list" do
      result = eval!("(elixirify [1 2 3])")
      assert is_list(result)
      assert result == [1, 2, 3]
    end

    test "nested vector deep conversion" do
      result = eval!("(elixirify [:ok [:nested \"data\"]])")
      assert is_list(result)
      assert result == [:ok, [:nested, "data"]]
    end

    test "map values walked" do
      result = eval!("(elixirify {:a [1 2]})")
      assert is_map(result)
      assert Map.get(result, :a) == [1, 2]
    end

    test "list idempotent" do
      result = eval!("(elixirify '(1 2 3))")
      assert is_list(result)
      assert result == [1, 2, 3]
    end

    # Tuples keep their tuple shape; only their elements are converted.
    test "tuple elements walked" do
      result = eval!("(elixirify #el[:ok [1 2]])")
      assert is_tuple(result)
      assert elem(result, 0) == :ok
      assert is_list(elem(result, 1))
      assert elem(result, 1) == [1, 2]
    end

    test "scalar passthrough - integer" do
      assert eval!("(elixirify 42)") == 42
    end

    test "scalar passthrough - string" do
      assert eval!("(elixirify \"hello\")") == "hello"
    end

    test "scalar passthrough - atom" do
      assert eval!("(elixirify :foo)") == :foo
    end
  end

  # ==========================================================================
  # Integration: roundtrips and composition
  # ==========================================================================
  describe "roundtrip conversions" do
    # NOTE: roundtrips are not identity — clojurify turns a tuple into a
    # vector, and elixirify turns a vector into a LIST (by spec), so the
    # original tuple shape is not restored.
    test "clojurify then elixirify roundtrip on tuple" do
      result = eval!("""
      (elixirify (clojurify #el[:ok "data"]))
      """)

      assert is_list(result)
      assert result == [:ok, "data"]
    end

    test "elixirify then clojurify roundtrip on vector" do
      result = eval!("""
      (clojurify (elixirify [1 2 3]))
      """)

      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(result) == [1, 2, 3]
    end

    test "deep nested roundtrip" do
      result = eval!("""
      (elixirify (clojurify #el[:ok #el[1 #el[2 3]]]))
      """)

      assert is_list(result)
      assert result == [:ok, [1, [2, 3]]]
    end

    test "map with nested roundtrip" do
      result = eval!("""
      (elixirify (clojurify {:a #el[1 2] :b #el[3 4]}))
      """)

      assert is_map(result)
      assert Map.get(result, :a) == [1, 2]
      assert Map.get(result, :b) == [3, 4]
    end
  end

  describe "tuple function with clojurify/elixirify" do
    test "tuple function result can be clojurified" do
      result = eval!("""
      (clojurify (tuple :ok "data"))
      """)

      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(result) == [:ok, "data"]
    end

    test "elixirify vector matches tuple construction" do
      # elixirify produces a list, not a tuple (by spec)
      result = eval!("(elixirify [1 2 3])")
      assert is_list(result)
      assert result == [1, 2, 3]
    end

    test "tuple-size on tuple function result" do
      result = eval!("(tuple-size (tuple :a :b :c :d))")
      assert result == 4
    end
  end

  describe "composition with core functions" do
    test "map over list then clojurify" do
      result = eval!("""
      (clojurify (Enum/map '(1 2 3) (fn [x] (* x 2))))
      """)

      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(result) == [2, 4, 6]
    end

    test "elixirify vector for Enum interop" do
      result = eval!("""
      (Enum/sum (elixirify [1 2 3 4 5]))
      """)

      assert result == 15
    end

    test "clojurify in let binding" do
      result = eval!("""
      (let [v (clojurify #el[:ok 42])]
        (nth v 1))
      """)

      assert result == 42
    end

    test "elixirify in let binding" do
      result = eval!("""
      (let [lst (elixirify [10 20 30])]
        (hd lst))
      """)

      assert result == 10
    end
  end

  # ==========================================================================
  # SubVector clojurify/elixirify
  # ==========================================================================
  describe "SubVector clojurify" do
    test "clojurify subvec returns vector" do
      result = eval!("""
      (let [v [1 2 3 4 5]
            sv (subvec v 1 4)]
        (clojurify sv))
      """)

      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(result) == [2, 3, 4]
    end
  end

  describe "SubVector elixirify" do
    test "elixirify subvec returns list" do
      result = eval!("""
      (let [v [1 2 3 4 5]
            sv (subvec v 1 4)]
        (elixirify sv))
      """)

      assert is_list(result)
      assert result == [2, 3, 4]
    end
  end

  # ==========================================================================
  # Protocol extensibility
  # ==========================================================================
  # User-defined records can customize conversion by implementing the
  # IClojurify / IElixirify protocols from CljElixir source.
  describe "protocol extensibility" do
    test "defrecord can extend IElixirify" do
      result = eval!("""
      (defmodule TestUser
        (defrecord User [name age]
          CljElixir.IElixirify
          (-elixirify [u] {:name (Map/get u :name) :age (Map/get u :age) :type "user"})))
      (let [u (TestUser.User/new "Alice" 30)]
        (elixirify u))
      """)

      assert is_map(result)
      assert Map.get(result, :name) == "Alice"
      assert Map.get(result, :age) == 30
      assert Map.get(result, :type) == "user"
    end

    test "defrecord can extend IClojurify" do
      result = eval!("""
      (defmodule TestPoint
        (defrecord Point [x y]
          CljElixir.IClojurify
          (-clojurify [p] [(Map/get p :x) (Map/get p :y)])))
      (let [p (TestPoint.Point/new 10 20)]
        (clojurify p))
      """)

      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(result) == [10, 20]
    end
  end

  # ==========================================================================
  # Tuple sequence and collection operations
  # ==========================================================================
  describe "tuple sequence operations" do
    test "seq on tuple" do
      result = eval!("(seq #el[1 2 3])")
      assert is_list(result)
      assert result == [1, 2, 3]
    end

    test "seq on empty tuple" do
      result = eval!("(seq (tuple))")
      assert result == nil
    end

    test "first on tuple" do
      result = eval!("(first #el[:a :b :c])")
      assert result == :a
    end

    test "rest on tuple" do
      result = eval!("(rest #el[:a :b :c])")
      assert is_list(result)
      assert result == [:b, :c]
    end

    # conj on a tuple appends at the end (native tuple semantics).
    test "conj on tuple" do
      result = eval!("(conj #el[1 2] 3)")
      assert is_tuple(result)
      assert result == {1, 2, 3}
    end

    test "into empty tuple from vector" do
      result = eval!("(into (tuple) [1 2 3])")
      assert is_tuple(result)
      assert result == {1, 2, 3}
    end

    test "into vector from tuple" do
      result = eval!("(into [] #el[1 2 3])")
      assert result.__struct__ == CljElixir.PersistentVector
      assert CljElixir.PersistentVector.to_list(result) == [1, 2, 3]
    end

    test "into empty tuple from list" do
      result = eval!("(into (tuple) '(1 2 3))")
      assert is_tuple(result)
      assert result == {1, 2, 3}
    end

    test "count on tuple via seq" do
      result = eval!("(count #el[1 2 3 4])")
      assert result == 4
    end
  end
end
+133
View File
@@ -0,0 +1,133 @@
defmodule CljElixir.Phase5Test do
  @moduledoc """
  Phase 5 tests: BEAM concurrency compiled from CljElixir source.

  Covers GenServer callback modules defined via `(defmodule ... (use GenServer) ...)`
  — init, handle-call, handle-cast, handle-info — and the classic
  spawn + send + receive loop pattern. `async: false` because every test
  defines globally named modules at runtime via `eval!`.
  """
  use ExUnit.Case, async: false

  # Helper to compile and evaluate CljElixir code.
  # Returns the evaluated result; raises with diagnostics on compile/eval error.
  defp eval!(source) do
    case CljElixir.Compiler.eval_string(source) do
      {:ok, result, _bindings} -> result
      {:error, errors} -> raise "CljElixir eval error: #{inspect(errors)}"
    end
  end

  # ==========================================================================
  # GenServer integration
  # ==========================================================================
  describe "GenServer - simple counter" do
    test "define and use a counter GenServer" do
      # handle-call with multiple clauses: pattern-matched on the request atom.
      eval!("""
      (defmodule TestCounter
        (use GenServer)
        (defn init [initial]
          #el[:ok initial])
        (defn handle-call
          ([:get _from state]
           #el[:reply state state])
          ([:increment _from state]
           #el[:reply :ok (+ state 1)])))
      """)

      {:ok, pid} = GenServer.start_link(TestCounter, 0)
      assert GenServer.call(pid, :get) == 0
      assert GenServer.call(pid, :increment) == :ok
      assert GenServer.call(pid, :get) == 1
      GenServer.stop(pid)
    end
  end

  describe "GenServer - handle_cast" do
    test "cast resets state" do
      eval!("""
      (defmodule TestCaster
        (use GenServer)
        (defn init [initial]
          #el[:ok initial])
        (defn handle-call
          ([:get _from state]
           #el[:reply state state]))
        (defn handle-cast
          ([:reset _state]
           #el[:noreply 0])))
      """)

      {:ok, pid} = GenServer.start_link(TestCaster, 42)
      assert GenServer.call(pid, :get) == 42
      GenServer.cast(pid, :reset)
      # Give the server a moment to process the async cast before reading state.
      Process.sleep(50)
      assert GenServer.call(pid, :get) == 0
      GenServer.stop(pid)
    end
  end

  describe "GenServer - handle_info" do
    test "handle-info receives plain messages" do
      eval!("""
      (defmodule TestInfoHandler
        (use GenServer)
        (defn init [initial]
          #el[:ok initial])
        (defn handle-call
          ([:get _from state]
           #el[:reply state state]))
        (defn handle-info
          ([:bump state]
           #el[:noreply (+ state 1)])))
      """)

      {:ok, pid} = GenServer.start_link(TestInfoHandler, 0)
      # A bare send/2 (not call/cast) is delivered to handle_info.
      send(pid, :bump)
      Process.sleep(50)
      assert GenServer.call(pid, :get) == 1
      GenServer.stop(pid)
    end
  end

  # ==========================================================================
  # ChatRoom pattern: spawn + send + receive loop
  # ==========================================================================
  describe "ChatRoom pattern" do
    test "spawn + send + receive loop" do
      # A tail-recursive receive loop: :ping replies and increments the counter,
      # :get replies with the current count, :stop returns (ending the process),
      # and :after 5000 guards against a hung test.
      eval!("""
      (defmodule TestChatLoop
        (defn loop [state]
          (receive
            [:ping pid]
            (do
              (send pid #el[:pong state])
              (TestChatLoop/loop (+ state 1)))
            [:get pid]
            (do
              (send pid #el[:count state])
              (TestChatLoop/loop state))
            :stop
            :stopped
            :after 5000
            :timeout)))
      """)

      pid = spawn(fn -> TestChatLoop.loop(0) end)
      send(pid, {:ping, self()})
      assert_receive {:pong, 0}, 1000
      send(pid, {:ping, self()})
      assert_receive {:pong, 1}, 1000
      send(pid, {:get, self()})
      assert_receive {:count, 2}, 1000
      send(pid, :stop)
      # The loop returns on :stop; wait briefly for the process to exit.
      Process.sleep(50)
      refute Process.alive?(pid)
    end
  end
end
+494
View File
@@ -0,0 +1,494 @@
defmodule CljElixir.Phase6Test do
  @moduledoc """
  Phase 6 tests: control flow and macros.

  Covers thread-first `->` / thread-last `->>`, try/catch/finally,
  `&` rest (variadic) params, map and sequential destructuring, and
  `defmacro` with quasiquote, unquote, splice-unquote, and auto-gensym.
  `async: false` because tests define globally named modules via `eval!`.
  """
  use ExUnit.Case, async: false

  # Helper to compile and evaluate CljElixir code.
  # Returns the evaluated result; raises with diagnostics on compile/eval error.
  defp eval!(source) do
    case CljElixir.Compiler.eval_string(source) do
      {:ok, result, _bindings} -> result
      {:error, errors} -> raise "CljElixir eval error: #{inspect(errors)}"
    end
  end

  # ==========================================================================
  # Thread-first (->)
  # ==========================================================================
  describe "-> (thread-first)" do
    test "single value passthrough: (-> 1) => 1" do
      assert eval!("(-> 1)") == 1
    end

    test "basic threading with bare symbols: (-> 1 inc inc) => 3" do
      assert eval!("(-> 1 inc inc)") == 3
    end

    test "threading into multi-arg function: (-> \"hello\" (str \" world\"))" do
      assert eval!("(-> \"hello\" (str \" world\"))") == "hello world"
    end

    test "threading with arithmetic: (-> 5 inc (+ 10)) => 16" do
      assert eval!("(-> 5 inc (+ 10))") == 16
    end

    test "threading with module calls: (-> \"hello\" (String/upcase))" do
      assert eval!("(-> \"hello\" (String/upcase))") == "HELLO"
    end

    test "threading into first position of list operations" do
      # (-> [1 2 3] (Enum/at 0)) => (Enum/at [1 2 3] 0) => 1
      assert eval!("(-> [1 2 3] (Enum/at 0))") == 1
    end

    test "nested threading" do
      # (-> 1 inc (+ (-> 10 dec))) expands to (+ (inc 1) (dec 10))
      # = (+ 2 9) = 11 — the inner -> is evaluated as an ordinary argument.
      assert eval!("(-> 1 inc (+ (-> 10 dec)))") == 11
    end

    test "thread-first with let binding" do
      result = eval!("""
      (let [x 5]
        (-> x inc inc))
      """)

      assert result == 7
    end

    test "threading with comparison" do
      # (-> 5 inc (> 3)) => (> 6 3) => true
      assert eval!("(-> 5 inc (> 3))") == true
    end
  end

  # ==========================================================================
  # Thread-last (->>)
  # ==========================================================================
  describe "->> (thread-last)" do
    test "single value passthrough: (->> 1) => 1" do
      assert eval!("(->> 1)") == 1
    end

    test "thread-last with bare symbols: (->> 1 inc inc) => 3" do
      assert eval!("(->> 1 inc inc)") == 3
    end

    test "thread-last inserts as last argument" do
      # (->> 1 (+ 10)) => (+ 10 1) => 11
      assert eval!("(->> 1 (+ 10))") == 11
    end

    test "thread-last with map over list" do
      # (->> [1 2 3] (map (fn [x] (inc x)))) => (map (fn [x] (inc x)) [1 2 3]) => [2 3 4]
      assert eval!("(->> [1 2 3] (map (fn [x] (inc x))))") == [2, 3, 4]
    end

    test "thread-last with filter" do
      # (->> [1 2 3 4 5] (filter (fn [x] (> x 2)))) => [3, 4, 5]
      assert eval!("(->> [1 2 3 4 5] (filter (fn [x] (> x 2))))") == [3, 4, 5]
    end

    test "thread-last chaining collection ops" do
      # (->> [1 2 3 4 5] (map (fn [x] (inc x))) (filter (fn [x] (> x 3))))
      # => (filter (fn [x] (> x 3)) (map (fn [x] (inc x)) [1 2 3 4 5]))
      # => (filter (fn [x] (> x 3)) [2 3 4 5 6])
      # => [4 5 6]
      assert eval!("(->> [1 2 3 4 5] (map (fn [x] (inc x))) (filter (fn [x] (> x 3))))") == [4, 5, 6]
    end

    test "nested thread-last" do
      # (->> 10 dec (+ (->> 1 inc))) => (+ (inc 1) (dec 10)) => 11
      assert eval!("(->> 10 dec (+ (->> 1 inc)))") == 11
    end
  end

  # ==========================================================================
  # Mixed / edge cases
  # ==========================================================================
  describe "threading edge cases" do
    test "threading with keyword-as-function" do
      # Keywords act as lookup functions: (-> {:name "Alice"} :name) => "Alice"
      assert eval!("(-> {:name \"Alice\"} :name)") == "Alice"
    end

    test "thread-first string operations" do
      assert eval!("(-> \"hello world\" (String/upcase) (String/split \" \"))") == ["HELLO", "WORLD"]
    end

    test "deeply nested threading" do
      # (-> 0 inc inc inc inc inc) => 5
      assert eval!("(-> 0 inc inc inc inc inc)") == 5
    end

    test "thread-first with dec" do
      assert eval!("(-> 10 dec dec dec)") == 7
    end

    test "thread-last with Enum/reduce" do
      # (->> [1 2 3 4] (Enum/sum)) => 10
      assert eval!("(->> [1 2 3 4] (Enum/sum))") == 10
    end
  end

  # ==========================================================================
  # try / catch / finally
  # ==========================================================================
  describe "try/catch/finally" do
    test "try with rescue catches exception" do
      # Bare (catch e ...) catches any raised exception.
      result = eval!("(try (throw \"boom\") (catch e (str \"caught: \" (Exception/message e))))")
      assert result == "caught: boom"
    end

    test "try with typed rescue" do
      # (catch RuntimeError e ...) names the exception type to match.
      result = eval!("""
      (try
        (throw "boom")
        (catch RuntimeError e
          (str "runtime: " (Exception/message e))))
      """)

      assert result == "runtime: boom"
    end

    test "try with finally" do
      # finally runs but doesn't affect the return value.
      result = eval!("""
      (try
        42
        (finally (println "cleanup")))
      """)

      assert result == 42
    end

    test "try with catch :throw" do
      # (catch :throw ...) maps to Elixir's catch clause for thrown values.
      result = eval!("""
      (try
        (Kernel/throw :oops)
        (catch :throw val val))
      """)

      assert result == :oops
    end

    test "try with catch :exit" do
      result = eval!("""
      (try
        (Kernel/exit :shutdown)
        (catch :exit reason reason))
      """)

      assert result == :shutdown
    end

    test "try returns body value when no exception" do
      result = eval!("(try (+ 1 2) (catch e e))")
      assert result == 3
    end

    test "try with multiple catch clauses" do
      # A plain (throw "oops") raises a RuntimeError, so the second clause wins.
      result = eval!("""
      (try
        (throw "oops")
        (catch ArgumentError e :arg_error)
        (catch RuntimeError e :runtime_error))
      """)

      assert result == :runtime_error
    end

    test "try with rescue and finally" do
      result = eval!("""
      (try
        (throw "oops")
        (catch e :caught)
        (finally (println "done")))
      """)

      assert result == :caught
    end
  end

  # ==========================================================================
  # & rest variadic params
  # ==========================================================================
  describe "& rest variadic params" do
    test "defn with & rest, no rest args (uses default [])" do
      result = eval!("""
      (defmodule VarTest1
        (defn foo [x & rest]
          (count rest)))
      (VarTest1/foo 1)
      """)

      assert result == 0
    end

    test "defn with & rest, with rest args passed as list" do
      # Rest args are supplied explicitly as a single list argument.
      result = eval!("""
      (defmodule VarTest2
        (defn foo [x & rest]
          rest))
      (VarTest2/foo 1 (list 2 3 4))
      """)

      assert result == [2, 3, 4]
    end

    test "defn with & rest uses rest in body" do
      result = eval!("""
      (defmodule VarTest3
        (defn foo [x & rest]
          (+ x (count rest))))
      (VarTest3/foo 10)
      """)

      assert result == 10
    end

    test "defn with & rest, multiple required params" do
      result = eval!("""
      (defmodule VarTest4
        (defn foo [a b & rest]
          (+ a b (count rest))))
      (VarTest4/foo 1 2)
      """)

      assert result == 3
    end

    test "defn with & rest, with rest args and multiple required params" do
      result = eval!("""
      (defmodule VarTest4b
        (defn foo [a b & rest]
          (+ a b (count rest))))
      (VarTest4b/foo 1 2 (list 10 20 30))
      """)

      assert result == 6
    end

    test "fn with & rest called inline" do
      # Call the fn inline since let-bound fn variable calls aren't supported yet.
      result = eval!("""
      ((fn [x & rest] (+ x (count rest))) 5 (list 1 2 3))
      """)

      assert result == 8
    end

    test "defn with only & rest param" do
      result = eval!("""
      (defmodule VarTest5
        (defn foo [& args]
          (count args)))
      (VarTest5/foo)
      """)

      assert result == 0
    end

    test "defn with only & rest param, with args" do
      result = eval!("""
      (defmodule VarTest6
        (defn foo [& args]
          args))
      (VarTest6/foo (list 1 2 3))
      """)

      assert result == [1, 2, 3]
    end
  end

  # ==========================================================================
  # Destructuring
  # ==========================================================================
  describe "destructuring" do
    test "map :keys destructuring in let" do
      result = eval!("""
      (let [{:keys [name age]} {:name "alice" :age 30}]
        (str name " is " age))
      """)

      assert result == "alice is 30"
    end

    test "map :keys with :as" do
      result = eval!("""
      (let [{:keys [name] :as person} {:name "bob" :age 25}]
        (str name " " (count person)))
      """)

      # count on a map returns number of k/v pairs
      assert result == "bob 2"
    end

    test "map :strs destructuring" do
      # :strs binds symbols from string (rather than keyword) map keys.
      result = eval!("""
      (let [{:strs [name]} {"name" "charlie"}]
        name)
      """)

      assert result == "charlie"
    end

    test "map destructuring with literal keys" do
      result = eval!("""
      (let [{x :x y :y} {:x 1 :y 2}]
        (+ x y))
      """)

      assert result == 3
    end

    test "sequential destructuring with & rest in let" do
      result = eval!("""
      (let [[a b & rest] (list 1 2 3 4 5)]
        rest)
      """)

      assert result == [3, 4, 5]
    end

    test "sequential destructuring without rest" do
      # Without &, a vector pattern still matches a tuple.
      result = eval!("""
      (let [[a b] #el[1 2]]
        (+ a b))
      """)

      assert result == 3
    end

    test "map :keys in defn params" do
      result = eval!("""
      (defmodule DestructTest1
        (defn greet [{:keys [name greeting]}]
          (str greeting " " name)))
      (DestructTest1/greet {:name "alice" :greeting "hi"})
      """)

      assert result == "hi alice"
    end

    test "sequential destructuring in fn params" do
      # Call fn inline since let-bound fn variable calls aren't supported yet.
      result = eval!("""
      ((fn [[a b & rest]] (+ a b (count rest))) (list 10 20 30 40))
      """)

      assert result == 32
    end

    test "nested map destructuring" do
      result = eval!("""
      (let [{:keys [name] {:keys [city]} :address} {:name "alice" :address {:city "NYC"}}]
        (str name " in " city))
      """)

      assert result == "alice in NYC"
    end

    test "map :keys in for binding" do
      result = eval!("""
      (for [{:keys [name]} (list {:name "a"} {:name "b"} {:name "c"})]
        name)
      """)

      assert result == ["a", "b", "c"]
    end

    test "sequential destructuring in for with &" do
      result = eval!("""
      (for [[a b & _rest] (list (list 1 2 99) (list 3 4 99))]
        (+ a b))
      """)

      assert result == [3, 7]
    end

    test "map destructuring with hyphenated keys" do
      # Hyphenated keywords require the quoted :"first-name" form on the map side.
      result = eval!("""
      (let [{:keys [first-name]} {:"first-name" "alice"}]
        first-name)
      """)

      assert result == "alice"
    end
  end

  # ==========================================================================
  # defmacro
  # ==========================================================================
  describe "defmacro" do
    test "simple macro - unless" do
      # ` quasiquotes the template; ~ unquotes the macro arguments into it.
      result = eval!("""
      (defmodule MacroTest1
        (defmacro unless [test then]
          `(if (not ~test) ~then))
        (defn check [x]
          (unless (> x 0) :negative)))
      (MacroTest1/check -5)
      """)

      assert result == :negative
    end

    test "macro with & body and splice-unquote" do
      # ~@body splices the rest-arg list into the (do ...) form.
      result = eval!("""
      (defmodule MacroTest2
        (defmacro unless [test & body]
          `(if (not ~test) (do ~@body)))
        (defn check [x]
          (unless (> x 0) :negative)))
      (MacroTest2/check -1)
      """)

      assert result == :negative
    end

    test "macro with multiple body forms" do
      result = eval!("""
      (defmodule MacroTest3
        (defmacro unless [test & body]
          `(if (not ~test) (do ~@body)))
        (defn check [x]
          (unless (> x 0)
            (println "not positive")
            :negative)))
      (MacroTest3/check -1)
      """)

      assert result == :negative
    end

    test "macro expands correctly with complex expressions" do
      result = eval!("""
      (defmodule MacroTest4
        (defmacro when-positive [x & body]
          `(if (> ~x 0) (do ~@body)))
        (defn test-it [n]
          (when-positive n
            (+ n 10))))
      (MacroTest4/test-it 5)
      """)

      assert result == 15
    end

    test "macro returns nil when condition not met" do
      # One-armed if yields nil when the test is false.
      result = eval!("""
      (defmodule MacroTest5
        (defmacro when-positive [x & body]
          `(if (> ~x 0) (do ~@body)))
        (defn test-it [n]
          (when-positive n
            (+ n 10))))
      (MacroTest5/test-it -5)
      """)

      assert result == nil
    end

    test "auto-gensym in macro" do
      # result# is an auto-gensym: hygienically renamed on each expansion.
      result = eval!("""
      (defmodule MacroTest6
        (defmacro my-let1 [val & body]
          `(let [result# ~val]
            (do ~@body)))
        (defn use-it []
          (my-let1 42
            :ok)))
      (MacroTest6/use-it)
      """)

      assert result == :ok
    end

    test "multiple macros in same module" do
      result = eval!("""
      (defmodule MacroTest7
        (defmacro unless [test & body]
          `(if (not ~test) (do ~@body)))
        (defmacro when-positive [x & body]
          `(if (> ~x 0) (do ~@body)))
        (defn check [x]
          (if (when-positive x (> x 10))
            :big
            (unless (> x 0) :non-positive))))
      (MacroTest7/check 20)
      """)

      assert result == :big
    end
  end
end
+232
View File
@@ -0,0 +1,232 @@
defmodule CljElixir.Phase7Test do
  @moduledoc """
  Phase 7 tests: Malli schema adapter.

  Covers `m/=>` function specs, auto `@type` generation from `def` schemas,
  schema cross-references, and recursive schemas via a `:registry`. Each test
  purges and deletes the module it compiled in its `after` block so repeated
  runs start from a clean code server. `async: false` because tests load
  globally named modules.
  """
  use ExUnit.Case, async: false

  # Compile and evaluate CljElixir source, returning the evaluated result.
  # Raises with diagnostics on failure.
  defp eval!(source) do
    case CljElixir.Compiler.eval_string(source) do
      {:ok, result, _} -> result
      {:error, errors} -> raise "Compilation failed: #{inspect(errors)}"
    end
  end

  # Compile CljElixir source to loaded BEAM modules (no top-level evaluation).
  # Raises with diagnostics on failure.
  defp compile!(source) do
    case CljElixir.Compiler.compile_to_beam(source) do
      {:ok, modules} -> modules
      {:error, errors} -> raise "Compilation failed: #{inspect(errors)}"
    end
  end

  describe "m/=> function specs" do
    test "simple function spec compiles" do
      compile!("""
      (defmodule SpecTest1
        (defn hello [name]
          (str "hello " name))
        (m/=> hello [:=> [:cat :string] :string]))
      """)

      # compile! raises on failure, so reaching here is the assertion.
      assert true
    after
      :code.purge(SpecTest1)
      :code.delete(SpecTest1)
    end

    test "multi-param function spec" do
      compile!("""
      (defmodule SpecTest2
        (defn add [a b]
          (+ a b))
        (m/=> add [:=> [:cat :int :int] :int]))
      """)

      assert true
    after
      :code.purge(SpecTest2)
      :code.delete(SpecTest2)
    end

    test "optional param generates multiple specs" do
      # [:? :string] marks an optional trailing argument in the :cat sequence.
      compile!("""
      (defmodule SpecTest3
        (defn greet
          ([name] (greet name "hello"))
          ([name greeting] (str greeting " " name)))
        (m/=> greet [:=> [:cat :string [:? :string]] :string]))
      """)

      assert true
    after
      :code.purge(SpecTest3)
      :code.delete(SpecTest3)
    end

    test "multi-arity via :function" do
      # :function wraps one :=> entry per arity.
      compile!("""
      (defmodule SpecTest4
        (defn greet
          ([name] (greet name "hello"))
          ([name greeting] (str greeting " " name)))
        (m/=> greet [:function
          [:=> [:cat :string] :string]
          [:=> [:cat :string :string] :string]]))
      """)

      assert true
    after
      :code.purge(SpecTest4)
      :code.delete(SpecTest4)
    end

    test "spec with various types" do
      compile!("""
      (defmodule SpecTest5
        (defn process [x]
          x)
        (m/=> process [:=> [:cat :any] [:or :int :string :nil]]))
      """)

      assert true
    after
      :code.purge(SpecTest5)
      :code.delete(SpecTest5)
    end

    test "spec with map type" do
      compile!("""
      (defmodule SpecTest6
        (defn get-name [user]
          (:name user))
        (m/=> get-name [:=> [:cat [:map [:name :string] [:age :int]]] :string]))
      """)

      assert true
    after
      :code.purge(SpecTest6)
      :code.delete(SpecTest6)
    end

    test "spec with tuple return" do
      compile!("""
      (defmodule SpecTest7
        (defn fetch [id]
          #el[:ok id])
        (m/=> fetch [:=> [:cat :int] [:tuple :atom :int]]))
      """)

      assert true
    after
      :code.purge(SpecTest7)
      :code.delete(SpecTest7)
    end
  end

  describe "auto @type from def schemas" do
    test "def with map schema generates type" do
      # A capitalized def holding a Malli vector schema should compile cleanly.
      compile!("""
      (defmodule TypeTest1
        (def User [:map [:name :string] [:age :int]])
        (defn get-name [user]
          (:name user)))
      """)

      assert true
    after
      :code.purge(TypeTest1)
      :code.delete(TypeTest1)
    end

    test "def with or schema" do
      compile!("""
      (defmodule TypeTest2
        (def Status [:enum :active :inactive :pending])
        (defn check [s] s))
      """)

      assert true
    after
      :code.purge(TypeTest2)
      :code.delete(TypeTest2)
    end

    test "def with and schema" do
      compile!("""
      (defmodule TypeTest3
        (def PositiveInt [:and :int [:> 0]])
        (defn check [n] n))
      """)

      assert true
    after
      :code.purge(TypeTest3)
      :code.delete(TypeTest3)
    end
  end

  describe "schema cross-references" do
    test "spec references a named schema type" do
      # The m/=> return position refers to the User schema defined above it.
      compile!("""
      (defmodule CrossRefTest1
        (def User [:map [:name :string] [:age :int]])
        (defn get-user [id]
          {:name "alice" :age 30})
        (m/=> get-user [:=> [:cat :int] User]))
      """)

      assert true
    after
      :code.purge(CrossRefTest1)
      :code.delete(CrossRefTest1)
    end

    test "schema references another schema" do
      compile!("""
      (defmodule CrossRefTest2
        (def PositiveInt [:and :int [:> 0]])
        (def Config [:map
          [:host :string]
          [:port PositiveInt]
          [:ssl? :boolean]])
        (defn load-config []
          {:host "localhost" :port 8080 :"ssl?" true}))
      """)

      assert true
    after
      :code.purge(CrossRefTest2)
      :code.delete(CrossRefTest2)
    end
  end

  describe "recursive schemas" do
    test "recursive schema with registry" do
      # :registry + [:ref :tree] allows the schema to reference itself.
      compile!("""
      (defmodule RecursiveTest1
        (def Tree [:schema {:registry {:tree [:or :int [:tuple [:ref :tree] [:ref :tree]]]}} [:ref :tree]])
        (defn make-leaf [n] n))
      """)

      assert true
    after
      :code.purge(RecursiveTest1)
      :code.delete(RecursiveTest1)
    end
  end

  describe "functions still work correctly" do
    test "module with spec can be called" do
      # Attaching an m/=> spec must not change the function's behavior.
      result = eval!("""
      (defmodule FuncSpecTest1
        (defn hello [name]
          (str "hello " name))
        (m/=> hello [:=> [:cat :string] :string]))
      (FuncSpecTest1/hello "world")
      """)

      assert result == "hello world"
    after
      :code.purge(FuncSpecTest1)
      :code.delete(FuncSpecTest1)
    end

    test "module with type and spec" do
      result = eval!("""
      (defmodule FuncSpecTest2
        (def User [:map [:name :string] [:age :int]])
        (defn make-user [name age]
          {:name name :age age})
        (m/=> make-user [:=> [:cat :string :int] User]))
      (FuncSpecTest2/make-user "alice" 30)
      """)

      assert result == %{name: "alice", age: 30}
    after
      :code.purge(FuncSpecTest2)
      :code.delete(FuncSpecTest2)
    end
  end
end
+820
View File
@@ -0,0 +1,820 @@
defmodule CljElixir.ReaderTest do
use ExUnit.Case, async: true
alias CljElixir.Reader
# ═══════════════════════════════════════════════════════════════════
# Helpers
# ═══════════════════════════════════════════════════════════════════
defp read!(source) do
{:ok, forms} = Reader.read_string(source)
forms
end
defp read_one!(source) do
[form] = read!(source)
form
end
# ═══════════════════════════════════════════════════════════════════
# Literal types
# ═══════════════════════════════════════════════════════════════════
describe "integers" do
test "positive integer" do
assert read_one!("42") == 42
end
test "zero" do
assert read_one!("0") == 0
end
test "negative integer" do
assert read_one!("-3") == -3
end
test "multi-digit" do
assert read_one!("12345") == 12345
end
end
describe "floats" do
test "simple float" do
assert read_one!("3.14") == 3.14
end
test "negative float" do
assert read_one!("-2.5") == -2.5
end
test "float starting with zero" do
assert read_one!("0.001") == 0.001
end
end
describe "strings" do
test "simple string" do
assert read_one!(~s("hello")) == "hello"
end
test "empty string" do
assert read_one!(~s("")) == ""
end
test "string with spaces" do
assert read_one!(~s("hello world")) == "hello world"
end
test "escaped double quote" do
assert read_one!(~s("say \\"hi\\"")) == ~s(say "hi")
end
test "escaped backslash" do
assert read_one!(~s("path\\\\to")) == "path\\to"
end
test "escaped newline" do
assert read_one!(~s("line1\\nline2")) == "line1\nline2"
end
test "escaped tab" do
assert read_one!(~s("col1\\tcol2")) == "col1\tcol2"
end
test "escaped carriage return" do
assert read_one!(~s("before\\rafter")) == "before\rafter"
end
test "multiline string" do
input = ~s("line1\nline2")
assert read_one!(input) == "line1\nline2"
end
end
describe "keywords" do
test "simple keyword" do
assert read_one!(":ok") == :ok
end
test "keyword with hyphen" do
assert read_one!(":my-key") == :"my-key"
end
test "keyword with numbers" do
assert read_one!(":v2") == :v2
end
test "quoted keyword" do
assert read_one!(~s(:"quoted-name")) == :"quoted-name"
end
test "keyword with question mark" do
assert read_one!(":empty?") == :empty?
end
test "keyword with exclamation" do
assert read_one!(":reset!") == :reset!
end
end
describe "booleans" do
test "true" do
assert read_one!("true") == true
end
test "false" do
assert read_one!("false") == false
end
end
describe "nil" do
test "nil literal" do
assert read_one!("nil") == nil
end
end
# ═══════════════════════════════════════════════════════════════════
# Symbols
# ═══════════════════════════════════════════════════════════════════
describe "symbols" do
test "simple symbol" do
assert read_one!("hello") == {:symbol, %{line: 1, col: 1}, "hello"}
end
test "symbol with hyphen" do
assert read_one!("my-func") == {:symbol, %{line: 1, col: 1}, "my-func"}
end
test "qualified symbol with slash" do
assert read_one!("Enum/map") == {:symbol, %{line: 1, col: 1}, "Enum/map"}
end
test "erlang module call" do
assert read_one!("io/format") == {:symbol, %{line: 1, col: 1}, "io/format"}
end
test "dynamic var *name*" do
assert read_one!("*self*") == {:symbol, %{line: 1, col: 1}, "*self*"}
end
test "symbol with question mark" do
assert read_one!("empty?") == {:symbol, %{line: 1, col: 1}, "empty?"}
end
test "symbol with exclamation" do
assert read_one!("swap!") == {:symbol, %{line: 1, col: 1}, "swap!"}
end
test "operator symbols" do
assert read_one!("+") == {:symbol, %{line: 1, col: 1}, "+"}
assert read_one!("-") == {:symbol, %{line: 1, col: 1}, "-"}
assert read_one!("*") == {:symbol, %{line: 1, col: 1}, "*"}
assert read_one!(">=") == {:symbol, %{line: 1, col: 1}, ">="}
assert read_one!("<=") == {:symbol, %{line: 1, col: 1}, "<="}
assert read_one!("!=") == {:symbol, %{line: 1, col: 1}, "!="}
end
test "underscore symbol" do
assert read_one!("_") == {:symbol, %{line: 1, col: 1}, "_"}
end
test "anon fn arg %" do
assert read_one!("%") == {:symbol, %{line: 1, col: 1}, "%"}
end
test "anon fn numbered args %1 %2" do
assert read_one!("%1") == {:symbol, %{line: 1, col: 1}, "%1"}
assert read_one!("%2") == {:symbol, %{line: 1, col: 1}, "%2"}
end
test "ampersand for rest args" do
assert read_one!("&") == {:symbol, %{line: 1, col: 1}, "&"}
end
test "defn- private function name" do
assert read_one!("defn-") == {:symbol, %{line: 1, col: 1}, "defn-"}
end
test "symbol starting with dot" do
assert read_one!(".method") == {:symbol, %{line: 1, col: 1}, ".method"}
end
end
# ═══════════════════════════════════════════════════════════════════
# Collection types
# ═══════════════════════════════════════════════════════════════════
describe "lists" do
test "simple list" do
{:list, meta, elements} = read_one!("(+ 1 2)")
assert meta == %{line: 1, col: 1}
assert elements == [{:symbol, %{line: 1, col: 2}, "+"}, 1, 2]
end
test "empty list" do
assert read_one!("()") == {:list, %{line: 1, col: 1}, []}
end
test "nested list" do
{:list, _, [_, {:list, inner_meta, inner_elems}]} = read_one!("(a (b c))")
assert inner_meta == %{line: 1, col: 4}
assert inner_elems == [{:symbol, %{line: 1, col: 5}, "b"}, {:symbol, %{line: 1, col: 7}, "c"}]
end
end
describe "vectors" do
test "simple vector" do
{:vector, meta, elements} = read_one!("[1 2 3]")
assert meta == %{line: 1, col: 1}
assert elements == [1, 2, 3]
end
test "empty vector" do
assert read_one!("[]") == {:vector, %{line: 1, col: 1}, []}
end
test "vector with mixed types" do
{:vector, _, elems} = read_one!("[:ok 42 \"hello\"]")
assert elems == [:ok, 42, "hello"]
end
end
describe "maps" do
test "simple map" do
{:map, meta, elements} = read_one!("{:name \"Ada\" :age 30}")
assert meta == %{line: 1, col: 1}
assert elements == [:name, "Ada", :age, 30]
end
test "empty map" do
assert read_one!("{}") == {:map, %{line: 1, col: 1}, []}
end
test "map with nested values" do
{:map, _, elements} = read_one!("{:a [1 2] :b {:c 3}}")
assert length(elements) == 4
assert Enum.at(elements, 0) == :a
assert {:vector, _, [1, 2]} = Enum.at(elements, 1)
assert Enum.at(elements, 2) == :b
assert {:map, _, [:c, 3]} = Enum.at(elements, 3)
end
end
describe "sets" do
test "simple set" do
{:set, meta, elements} = read_one!("\#{:a :b :c}")
assert meta == %{line: 1, col: 1}
assert elements == [:a, :b, :c]
end
test "empty set" do
assert read_one!("\#{}") == {:set, %{line: 1, col: 1}, []}
end
test "nested set containing a set" do
{:set, _, elements} = read_one!("\#{\#{:a}}")
assert [inner] = elements
assert {:set, _, [:a]} = inner
end
end
describe "tuples (#el[...])" do
test "simple tuple" do
{:tuple, meta, elements} = read_one!("#el[:ok value]")
assert meta == %{line: 1, col: 1}
assert elements == [:ok, {:symbol, %{line: 1, col: 9}, "value"}]
end
test "empty tuple" do
assert read_one!("#el[]") == {:tuple, %{line: 1, col: 1}, []}
end
test "tuple with nested data" do
{:tuple, _, elements} = read_one!("#el[:ok {:name \"Ada\"}]")
assert Enum.at(elements, 0) == :ok
assert {:map, _, [:name, "Ada"]} = Enum.at(elements, 1)
end
end
# ═══════════════════════════════════════════════════════════════════
# Nested structures
# ═══════════════════════════════════════════════════════════════════
describe "nested structures" do
test "deeply nested list" do
{:list, _, [_, {:list, _, [_, {:list, _, [sym]}]}]} = read_one!("(a (b (c)))")
assert sym == {:symbol, %{line: 1, col: 8}, "c"}
end
test "vector inside map inside list" do
{:list, _, [sym, {:map, _, [:data, {:vector, _, [1, 2, 3]}]}]} =
read_one!("(process {:data [1 2 3]})")
assert sym == {:symbol, %{line: 1, col: 2}, "process"}
end
test "let binding form" do
{:list, _, [let_sym, {:vector, _, bindings}, body]} =
read_one!("(let [x 1 y 2] (+ x y))")
assert let_sym == {:symbol, %{line: 1, col: 2}, "let"}
assert bindings == [{:symbol, %{line: 1, col: 7}, "x"}, 1, {:symbol, %{line: 1, col: 11}, "y"}, 2]
assert {:list, _, [{:symbol, _, "+"}, {:symbol, _, "x"}, {:symbol, _, "y"}]} = body
end
end
# ═══════════════════════════════════════════════════════════════════
# Prefix forms
# ═══════════════════════════════════════════════════════════════════
describe "quote" do
test "quote a list" do
{:quote, meta, inner} = read_one!("'(1 2 3)")
assert meta == %{line: 1, col: 1}
assert {:list, _, [1, 2, 3]} = inner
end
test "quote a symbol" do
{:quote, _, inner} = read_one!("'hello")
assert inner == {:symbol, %{line: 1, col: 2}, "hello"}
end
end
describe "quasiquote" do
test "quasiquote a list" do
{:quasiquote, meta, inner} = read_one!("`(list ~x ~@rest)")
assert meta == %{line: 1, col: 1}
assert {:list, _, [_, {:unquote, _, _}, {:splice_unquote, _, _}]} = inner
end
end
describe "unquote" do
test "unquote a symbol" do
{:unquote, meta, inner} = read_one!("~x")
assert meta == %{line: 1, col: 1}
assert inner == {:symbol, %{line: 1, col: 2}, "x"}
end
end
describe "splice-unquote" do
test "splice-unquote a symbol" do
{:splice_unquote, meta, inner} = read_one!("~@items")
assert meta == %{line: 1, col: 1}
assert inner == {:symbol, %{line: 1, col: 3}, "items"}
end
end
describe "deref" do
test "deref a symbol" do
{:deref, meta, inner} = read_one!("@my-atom")
assert meta == %{line: 1, col: 1}
assert inner == {:symbol, %{line: 1, col: 2}, "my-atom"}
end
end
describe "metadata" do
test "map metadata" do
{:with_meta, meta, {meta_map, target}} =
read_one!("^{:doc \"hello\"} my-fn")
assert meta == %{line: 1, col: 1}
assert {:map, _, [:doc, "hello"]} = meta_map
assert target == {:symbol, %{line: 1, col: 17}, "my-fn"}
end
test "keyword metadata shorthand" do
{:with_meta, _, {meta_map, target}} =
read_one!("^:private my-fn")
assert {:map, _, [:private, true]} = meta_map
assert target == {:symbol, %{line: 1, col: 11}, "my-fn"}
end
test "metadata on a vector" do
{:with_meta, _, {meta_map, target}} =
read_one!("^:dynamic [1 2]")
assert {:map, _, [:dynamic, true]} = meta_map
assert {:vector, _, [1, 2]} = target
end
end
# ═══════════════════════════════════════════════════════════════════
# Anonymous function shorthand
# ═══════════════════════════════════════════════════════════════════
describe "anonymous function #(...)" do
test "simple anon fn" do
{:anon_fn, meta, body} = read_one!("#(* % 2)")
assert meta == %{line: 1, col: 1}
assert {:list, _, [{:symbol, _, "*"}, {:symbol, _, "%"}, 2]} = body
end
test "anon fn with multiple args" do
{:anon_fn, _, body} = read_one!("#(+ %1 %2)")
assert {:list, _, [{:symbol, _, "+"}, {:symbol, _, "%1"}, {:symbol, _, "%2"}]} = body
end
test "anon fn with nested call" do
{:anon_fn, _, body} = read_one!("#(str \"hello \" %)")
assert {:list, _, [{:symbol, _, "str"}, "hello ", {:symbol, _, "%"}]} = body
end
end
# ═══════════════════════════════════════════════════════════════════
# Regex literals
# ═══════════════════════════════════════════════════════════════════
describe "regex literals" do
test "simple regex" do
{:regex, meta, pattern} = read_one!(~s(#"pattern"))
assert meta == %{line: 1, col: 1}
assert pattern == "pattern"
end
test "regex with special chars" do
{:regex, _, pattern} = read_one!(~s(#"^\\d{3}-\\d{4}$"))
assert pattern == "^\\d{3}-\\d{4}$"
end
end
# ═══════════════════════════════════════════════════════════════════
# Comments and whitespace
# ═══════════════════════════════════════════════════════════════════
describe "comments" do
test "single-line comment ignored" do
forms = read!("; this is a comment\n42")
assert forms == [42]
end
test "comment after form" do
forms = read!("42 ; a number")
assert forms == [42]
end
test "multiple comments" do
forms = read!("; comment 1\n; comment 2\n42")
assert forms == [42]
end
test "comment between forms" do
forms = read!("1\n; between\n2")
assert forms == [1, 2]
end
end
describe "whitespace handling" do
test "commas are whitespace" do
{:vector, _, elems} = read_one!("[1, 2, 3]")
assert elems == [1, 2, 3]
end
test "commas in maps" do
{:map, _, elems} = read_one!("{:a 1, :b 2}")
assert elems == [:a, 1, :b, 2]
end
test "tabs and spaces" do
forms = read!(" \t 42")
assert forms == [42]
end
test "multiple newlines" do
forms = read!("\n\n42\n\n")
assert forms == [42]
end
end
# ═══════════════════════════════════════════════════════════════════
# Edge cases
# ═══════════════════════════════════════════════════════════════════
describe "negative numbers" do
test "negative integer as standalone" do
assert read_one!("-3") == -3
end
test "negative float as standalone" do
assert read_one!("-3.14") == -3.14
end
test "negative numbers inside list" do
{:list, _, elems} = read_one!("(-3 -4)")
assert elems == [-3, -4]
end
test "subtraction symbol followed by space and number" do
{:list, _, [sym, num]} = read_one!("(- 3)")
assert sym == {:symbol, %{line: 1, col: 2}, "-"}
assert num == 3
end
test "negative number after symbol in list" do
{:list, _, [sym, num]} = read_one!("(x -3)")
assert sym == {:symbol, %{line: 1, col: 2}, "x"}
assert num == -3
end
test "negative number in vector" do
{:vector, _, elems} = read_one!("[-1 -2 -3]")
assert elems == [-1, -2, -3]
end
end
describe "keywords with special chars" do
test "keyword with hyphen" do
assert read_one!(":my-key") == :"my-key"
end
test "keyword with question mark" do
assert read_one!(":valid?") == :valid?
end
test "keyword with dot" do
assert read_one!(":some.ns") == :"some.ns"
end
end
describe "empty collections" do
  # Empty literals keep their node tag plus the position of the opening
  # delimiter.
  test "empty list" do
    assert {:list, %{line: 1, col: 1}, []} == read_one!("()")
  end

  test "empty vector" do
    assert {:vector, %{line: 1, col: 1}, []} == read_one!("[]")
  end

  test "empty map" do
    assert {:map, %{line: 1, col: 1}, []} == read_one!("{}")
  end

  test "empty set" do
    assert {:set, %{line: 1, col: 1}, []} == read_one!("\#{}")
  end

  test "empty tuple" do
    assert {:tuple, %{line: 1, col: 1}, []} == read_one!("#el[]")
  end
end
# ═══════════════════════════════════════════════════════════════════
# Error cases
# ═══════════════════════════════════════════════════════════════════
describe "error cases" do
  # Unterminated collections surface a descriptive {:error, message};
  # stray closing delimiters also fail, message text unspecified.
  test "unclosed list" do
    assert {:error, message} = Reader.read_string("(1 2 3")
    assert String.contains?(message, "expected ')'")
  end

  test "unclosed vector" do
    assert {:error, message} = Reader.read_string("[1 2 3")
    assert String.contains?(message, "expected ']'")
  end

  test "unclosed map" do
    assert {:error, message} = Reader.read_string("{:a 1")
    assert String.contains?(message, "expected '}'")
  end

  test "unclosed set" do
    assert {:error, message} = Reader.read_string("\#{:a :b")
    assert String.contains?(message, "expected '}'")
  end

  test "unclosed string" do
    assert {:error, message} = Reader.read_string(~s("hello))
    assert String.contains?(message, "Unterminated string")
  end

  test "unclosed tuple" do
    assert {:error, message} = Reader.read_string("#el[:ok")
    assert String.contains?(message, "expected ']'")
  end

  test "unexpected closing paren" do
    assert {:error, _message} = Reader.read_string(")")
  end

  test "unexpected closing bracket" do
    assert {:error, _message} = Reader.read_string("]")
  end

  test "unexpected closing brace" do
    assert {:error, _message} = Reader.read_string("}")
  end
end
# ═══════════════════════════════════════════════════════════════════
# Multi-form parsing
# ═══════════════════════════════════════════════════════════════════
describe "multi-form parsing" do
  # read!/1 yields every top-level form in source order; blank or
  # comment-only input yields an empty list.
  test "multiple top-level forms" do
    assert read!("1 2 3") == [1, 2, 3]
  end

  test "multiple forms of different types" do
    assert read!(":ok 42 \"hello\" true nil") == [:ok, 42, "hello", true, nil]
  end

  test "multiple lists" do
    forms = read!("(+ 1 2) (* 3 4)")
    assert length(forms) == 2
    assert Enum.all?(forms, &match?({:list, _, _}, &1))
  end

  test "forms separated by newlines" do
    assert read!("1\n2\n3") == [1, 2, 3]
  end

  test "empty input" do
    assert read!("") == []
  end

  test "only whitespace" do
    assert read!(" \n\t ") == []
  end

  test "only comments" do
    assert read!("; just a comment\n; another comment") == []
  end
end
# ═══════════════════════════════════════════════════════════════════
# Line and column tracking
# ═══════════════════════════════════════════════════════════════════
describe "line and column tracking" do
  # Symbol nodes carry a %{line: _, col: _} metadata map; positions are
  # 1-based and advance across newlines and comments.
  test "first form at line 1, col 1" do
    {:symbol, position, _} = read_one!("hello")
    assert position == %{line: 1, col: 1}
  end

  test "form after newline tracks correct line" do
    [_, {:symbol, position, _}] = read!("foo\nbar")
    assert %{line: 2, col: 1} = position
  end

  test "form after comment tracks correct line" do
    [form] = read!("; comment\nhello")
    assert {:symbol, %{line: 2, col: 1}, "hello"} = form
  end

  test "elements inside collection track position" do
    {:list, _, [_, middle, _]} = read_one!("(a b c)")
    assert {:symbol, %{line: 1, col: 4}, "b"} = middle
  end
end
# ═══════════════════════════════════════════════════════════════════
# Tokenizer-specific edge cases
# ═══════════════════════════════════════════════════════════════════
describe "tokenizer edge cases" do
  # The `#` dispatch character selects set / tuple / anonymous-fn /
  # regex literals based on what follows it.
  test "dispatch #el[ is recognized as tuple start" do
    # Fixed: was a bare match with no `assert`, unlike every sibling
    # test. Wrapping it makes a failure report as an ExUnit assertion
    # error instead of a raw MatchError.
    assert {:tuple, _, [:ok]} = read_one!("#el[:ok]")
  end

  test "#el[ does not consume extra chars" do
    {:tuple, _, [num]} = read_one!("#el[42]")
    assert num == 42
  end

  test "hash dispatch for set vs tuple vs anon fn" do
    assert {:set, _, _} = read_one!("\#{1 2}")
    assert {:tuple, _, _} = read_one!("#el[1 2]")
    assert {:anon_fn, _, _} = read_one!("#(+ 1 2)")
    assert {:regex, _, _} = read_one!(~s(#"abc"))
  end
end
# ═══════════════════════════════════════════════════════════════════
# Real-world: ChatRoom from the spec
# ═══════════════════════════════════════════════════════════════════
describe "ChatRoom example" do
# Reader-level acceptance tests: the ChatRoom program from the language
# spec must round-trip through Reader.read_string/1 into list/symbol/
# vector AST nodes. Only structure is asserted, not positions.
test "parses the ChatRoom defmodule" do
source = """
(defmodule ChatRoom
(defn loop [state]
(receive
[:join username pid]
(let [members (assoc (:members state) username pid)]
(send pid [:welcome username (count members)])
(loop (assoc state :members members)))
[:message from body]
(do
(doseq [[_name pid] (:members state)]
(send pid [:chat from body]))
(loop state))
[:leave username]
(loop (update state :members dissoc username))
:shutdown
(do
(doseq [[_name pid] (:members state)]
(send pid :room-closed))
:ok))))
"""
{:ok, [form]} = Reader.read_string(source)
# Outer shape: (defmodule ChatRoom <body...>)
assert {:list, _, [defmod_sym, chatroom_sym | body]} = form
assert {:symbol, _, "defmodule"} = defmod_sym
assert {:symbol, _, "ChatRoom"} = chatroom_sym
# The body should contain the defn form
[defn_form] = body
assert {:list, _, [defn_sym, loop_sym, params | _rest_body]} = defn_form
assert {:symbol, _, "defn"} = defn_sym
assert {:symbol, _, "loop"} = loop_sym
assert {:vector, _, [{:symbol, _, "state"}]} = params
end
test "parses ChatRoom usage" do
source = """
(def room (spawn (fn [] (ChatRoom/loop {:owner "alice" :members {}}))))
(send room [:join "alice" *self*])
(send room [:join "bob" *self*])
(send room [:message "bob" "hey everyone"])
"""
# Four top-level forms: one def plus three send calls.
{:ok, forms} = Reader.read_string(source)
assert length(forms) == 4
# First form: (def room ...)
[def_form | _] = forms
assert {:list, _, [{:symbol, _, "def"}, {:symbol, _, "room"}, spawn_call]} = def_form
assert {:list, _, [{:symbol, _, "spawn"}, _fn_form]} = spawn_call
# Last form: (send room [:message ...])
last = List.last(forms)
assert {:list, _, [{:symbol, _, "send"}, {:symbol, _, "room"}, msg_vec]} = last
# Keywords and strings inside the vector are already plain Elixir values.
assert {:vector, _, [:message, "bob", "hey everyone"]} = msg_vec
end
end
# ═══════════════════════════════════════════════════════════════════
# Complex real-world patterns
# ═══════════════════════════════════════════════════════════════════
describe "complex forms" do
# Composite forms combining several reader features; these mirror
# idioms the later compiler stages must handle.
test "defn with multiple clauses" do
# Multi-arity defn: each ([params] body) clause is its own list node.
source = "(defn greet ([name] (greet name \"hello\")) ([name greeting] (str greeting \" \" name)))"
{:ok, [form]} = Reader.read_string(source)
assert {:list, _, [{:symbol, _, "defn"}, {:symbol, _, "greet"} | clauses]} = form
assert length(clauses) == 2
end
test "let with destructuring" do
# {:keys [...]} map destructuring inside the binding vector; only the
# overall (let [bindings] body) shape is asserted here.
source = "(let [{:keys [name email]} user] (str name \" <\" email \">\"))"
{:ok, [form]} = Reader.read_string(source)
assert {:list, _, [{:symbol, _, "let"}, {:vector, _, _bindings}, _body]} = form
end
test "metadata on defmodule" do
# ^{...} metadata reads as a :with_meta node in the module-name slot.
source = "(defmodule ^{:author \"Ada\"} Greeter (defn hello [name] (str \"hello \" name)))"
{:ok, [form]} = Reader.read_string(source)
assert {:list, _, [{:symbol, _, "defmodule"}, {:with_meta, _, _}, _greeter | _]} = form
end
test "cond form" do
source = """
(cond
(< x 0) "negative"
(= x 0) "zero"
:else "positive")
"""
{:ok, [form]} = Reader.read_string(source)
assert {:list, _, [{:symbol, _, "cond"} | clauses]} = form
# 6 elements: 3 test/result pairs
assert length(clauses) == 6
end
test "quasiquote with unquote and splice-unquote" do
# Backtick wraps the list in a :quasiquote node; ~name becomes an
# :unquote node. (~@ splice nodes are not asserted in this test.)
source = "`(defn ~name [~@args] ~@body)"
{:ok, [form]} = Reader.read_string(source)
assert {:quasiquote, _, {:list, _, elements}} = form
assert {:symbol, _, "defn"} = Enum.at(elements, 0)
assert {:unquote, _, {:symbol, _, "name"}} = Enum.at(elements, 1)
end
test "nested tuples and sets" do
# Dispatch literals nest: an #el[...] tuple containing a #{...} set.
source = "#el[:ok \#{:a :b}]"
{:ok, [form]} = Reader.read_string(source)
assert {:tuple, _, [:ok, {:set, _, [:a, :b]}]} = form
end
end
end
File diff suppressed because it is too large Load Diff
+1
View File
@@ -0,0 +1 @@
ExUnit.start()