summaryrefslogtreecommitdiff
path: root/lib/elixir
diff options
context:
space:
mode:
authorJosé Valim <jose.valim@plataformatec.com.br>2014-04-21 18:58:36 +0200
committerJosé Valim <jose.valim@plataformatec.com.br>2014-04-21 19:06:35 +0200
commit56f15d9f4e609b8ee219b829ec6c3e013efc397c (patch)
tree06e4417b997d786b5ee2920c7de38cf43259fc56 /lib/elixir
parent7ccdb2ab299cb7d76b55aa992272205d67948ce8 (diff)
downloadelixir-56f15d9f4e609b8ee219b829ec6c3e013efc397c.tar.gz
Do not add spaces after { and before }
This makes the source code consistent with the result returned by inspect/2.
Diffstat (limited to 'lib/elixir')
-rw-r--r--lib/elixir/include/elixir.hrl6
-rw-r--r--lib/elixir/lib/access.ex10
-rw-r--r--lib/elixir/lib/application/behaviour.ex4
-rw-r--r--lib/elixir/lib/base.ex30
-rw-r--r--lib/elixir/lib/behaviour.ex28
-rw-r--r--lib/elixir/lib/code.ex66
-rw-r--r--lib/elixir/lib/collectable.ex28
-rw-r--r--lib/elixir/lib/dict.ex34
-rw-r--r--lib/elixir/lib/dict/behaviour.ex30
-rw-r--r--lib/elixir/lib/enum.ex404
-rw-r--r--lib/elixir/lib/exception.ex46
-rw-r--r--lib/elixir/lib/file.ex256
-rw-r--r--lib/elixir/lib/float.ex18
-rw-r--r--lib/elixir/lib/gen_event/behaviour.ex16
-rw-r--r--lib/elixir/lib/gen_server/behaviour.ex36
-rw-r--r--lib/elixir/lib/hash_dict.ex46
-rw-r--r--lib/elixir/lib/hash_set.ex26
-rw-r--r--lib/elixir/lib/inspect.ex10
-rw-r--r--lib/elixir/lib/inspect/algebra.ex6
-rw-r--r--lib/elixir/lib/integer.ex8
-rw-r--r--lib/elixir/lib/io.ex36
-rw-r--r--lib/elixir/lib/io/ansi.ex4
-rw-r--r--lib/elixir/lib/io/ansi/docs.ex26
-rw-r--r--lib/elixir/lib/kernel.ex190
-rw-r--r--lib/elixir/lib/kernel/cli.ex70
-rw-r--r--lib/elixir/lib/kernel/error_handler.ex10
-rw-r--r--lib/elixir/lib/kernel/lexical_tracker.ex60
-rw-r--r--lib/elixir/lib/kernel/parallel_compiler.ex60
-rw-r--r--lib/elixir/lib/kernel/parallel_require.ex18
-rw-r--r--lib/elixir/lib/kernel/special_forms.ex70
-rw-r--r--lib/elixir/lib/kernel/typespec.ex278
-rw-r--r--lib/elixir/lib/keyword.ex54
-rw-r--r--lib/elixir/lib/list.ex32
-rw-r--r--lib/elixir/lib/list_dict.ex38
-rw-r--r--lib/elixir/lib/macro.ex228
-rw-r--r--lib/elixir/lib/macro/env.ex24
-rw-r--r--lib/elixir/lib/map.ex14
-rw-r--r--lib/elixir/lib/module.ex112
-rw-r--r--lib/elixir/lib/module/locals_tracker.ex134
-rw-r--r--lib/elixir/lib/option_parser.ex92
-rw-r--r--lib/elixir/lib/path.ex30
-rw-r--r--lib/elixir/lib/process.ex8
-rw-r--r--lib/elixir/lib/protocol.ex30
-rw-r--r--lib/elixir/lib/protocol/consolidation.ex56
-rw-r--r--lib/elixir/lib/range.ex34
-rw-r--r--lib/elixir/lib/record.ex18
-rw-r--r--lib/elixir/lib/record/backend.ex30
-rw-r--r--lib/elixir/lib/record/deprecated.ex98
-rw-r--r--lib/elixir/lib/record/extractor.ex32
-rw-r--r--lib/elixir/lib/regex.ex44
-rw-r--r--lib/elixir/lib/set.ex24
-rw-r--r--lib/elixir/lib/stream.ex272
-rw-r--r--lib/elixir/lib/stream/reducers.ex42
-rw-r--r--lib/elixir/lib/string.ex30
-rw-r--r--lib/elixir/lib/string_io.ex176
-rw-r--r--lib/elixir/lib/supervisor/behaviour.ex10
-rw-r--r--lib/elixir/lib/system.ex18
-rw-r--r--lib/elixir/lib/tuple.ex10
-rw-r--r--lib/elixir/lib/uri.ex26
-rw-r--r--lib/elixir/lib/version.ex124
-rw-r--r--lib/elixir/src/elixir.erl54
-rw-r--r--lib/elixir/src/elixir_aliases.erl36
-rw-r--r--lib/elixir/src/elixir_bitstring.erl74
-rw-r--r--lib/elixir/src/elixir_bootstrap.erl16
-rw-r--r--lib/elixir/src/elixir_clauses.erl116
-rw-r--r--lib/elixir/src/elixir_code_server.erl98
-rw-r--r--lib/elixir/src/elixir_compiler.erl52
-rw-r--r--lib/elixir/src/elixir_counter.erl12
-rw-r--r--lib/elixir/src/elixir_def.erl156
-rw-r--r--lib/elixir/src/elixir_def_defaults.erl30
-rw-r--r--lib/elixir/src/elixir_def_overridable.erl40
-rw-r--r--lib/elixir/src/elixir_dispatch.erl370
-rw-r--r--lib/elixir/src/elixir_env.erl8
-rw-r--r--lib/elixir/src/elixir_errors.erl36
-rw-r--r--lib/elixir/src/elixir_exp.erl354
-rw-r--r--lib/elixir/src/elixir_exp_clauses.erl158
-rw-r--r--lib/elixir/src/elixir_fn.erl112
-rw-r--r--lib/elixir/src/elixir_for.erl194
-rw-r--r--lib/elixir/src/elixir_import.erl52
-rw-r--r--lib/elixir/src/elixir_interpolation.erl18
-rw-r--r--lib/elixir/src/elixir_lexical.erl10
-rw-r--r--lib/elixir/src/elixir_locals.erl52
-rw-r--r--lib/elixir/src/elixir_map.erl88
-rw-r--r--lib/elixir/src/elixir_module.erl172
-rw-r--r--lib/elixir/src/elixir_parser.yrl260
-rw-r--r--lib/elixir/src/elixir_quote.erl186
-rw-r--r--lib/elixir/src/elixir_scope.erl50
-rw-r--r--lib/elixir/src/elixir_sup.erl8
-rw-r--r--lib/elixir/src/elixir_tokenizer.erl362
-rw-r--r--lib/elixir/src/elixir_translator.erl258
-rw-r--r--lib/elixir/src/elixir_try.erl120
-rw-r--r--lib/elixir/src/elixir_utils.erl92
-rw-r--r--lib/elixir/test/elixir/access_test.exs8
-rw-r--r--lib/elixir/test/elixir/base_test.exs60
-rw-r--r--lib/elixir/test/elixir/behaviour_test.exs2
-rw-r--r--lib/elixir/test/elixir/char_list_test.exs8
-rw-r--r--lib/elixir/test/elixir/code_test.exs42
-rw-r--r--lib/elixir/test/elixir/deprecated_record/access_test.exs8
-rw-r--r--lib/elixir/test/elixir/deprecated_record/private_test.exs2
-rw-r--r--lib/elixir/test/elixir/deprecated_record/record_test.exs18
-rw-r--r--lib/elixir/test/elixir/dict_test.exs18
-rw-r--r--lib/elixir/test/elixir/enum_test.exs102
-rw-r--r--lib/elixir/test/elixir/exception_test.exs4
-rw-r--r--lib/elixir/test/elixir/file_test.exs122
-rw-r--r--lib/elixir/test/elixir/gen_event/behaviour_test.exs6
-rw-r--r--lib/elixir/test/elixir/gen_server/behaviour_test.exs20
-rw-r--r--lib/elixir/test/elixir/hash_dict_test.exs14
-rw-r--r--lib/elixir/test/elixir/inspect/algebra_test.exs24
-rw-r--r--lib/elixir/test/elixir/inspect_test.exs28
-rw-r--r--lib/elixir/test/elixir/io/ansi_test.exs2
-rw-r--r--lib/elixir/test/elixir/io_test.exs22
-rw-r--r--lib/elixir/test/elixir/kernel/case_test.exs16
-rw-r--r--lib/elixir/test/elixir/kernel/cli_test.exs2
-rw-r--r--lib/elixir/test/elixir/kernel/comprehension_test.exs6
-rw-r--r--lib/elixir/test/elixir/kernel/docs_test.exs2
-rw-r--r--lib/elixir/test/elixir/kernel/errors_test.exs14
-rw-r--r--lib/elixir/test/elixir/kernel/expansion_test.exs44
-rw-r--r--lib/elixir/test/elixir/kernel/fn_test.exs8
-rw-r--r--lib/elixir/test/elixir/kernel/lexical_tracker_test.exs2
-rw-r--r--lib/elixir/test/elixir/kernel/overridable_test.exs26
-rw-r--r--lib/elixir/test/elixir/kernel/quote_test.exs72
-rw-r--r--lib/elixir/test/elixir/kernel/rescue_test.exs2
-rw-r--r--lib/elixir/test/elixir/kernel/typespec_test.exs62
-rw-r--r--lib/elixir/test/elixir/kernel_test.exs16
-rw-r--r--lib/elixir/test/elixir/keyword_test.exs8
-rw-r--r--lib/elixir/test/elixir/list_test.exs18
-rw-r--r--lib/elixir/test/elixir/macro_test.exs58
-rw-r--r--lib/elixir/test/elixir/map_test.exs78
-rw-r--r--lib/elixir/test/elixir/module/locals_tracker_test.exs124
-rw-r--r--lib/elixir/test/elixir/module_test.exs30
-rw-r--r--lib/elixir/test/elixir/option_parser_test.exs64
-rw-r--r--lib/elixir/test/elixir/process_test.exs2
-rw-r--r--lib/elixir/test/elixir/protocol_test.exs42
-rw-r--r--lib/elixir/test/elixir/record_test.exs10
-rw-r--r--lib/elixir/test/elixir/regex_test.exs16
-rw-r--r--lib/elixir/test/elixir/set_test.exs8
-rw-r--r--lib/elixir/test/elixir/stream_test.exs60
-rw-r--r--lib/elixir/test/elixir/string_io_test.exs82
-rw-r--r--lib/elixir/test/elixir/string_test.exs10
-rw-r--r--lib/elixir/test/elixir/supervisor/behaviour_test.exs26
-rw-r--r--lib/elixir/test/elixir/system_test.exs4
-rw-r--r--lib/elixir/test/elixir/test_helper.exs8
-rw-r--r--lib/elixir/test/elixir/tuple_test.exs24
-rw-r--r--lib/elixir/test/elixir/version_test.exs10
-rw-r--r--lib/elixir/test/erlang/atom_test.erl4
-rw-r--r--lib/elixir/test/erlang/control_test.erl20
-rw-r--r--lib/elixir/test/erlang/function_test.erl20
-rw-r--r--lib/elixir/test/erlang/match_test.erl12
-rw-r--r--lib/elixir/test/erlang/module_test.erl16
-rw-r--r--lib/elixir/test/erlang/operators_test.erl4
-rw-r--r--lib/elixir/test/erlang/string_test.erl4
-rw-r--r--lib/elixir/test/erlang/test_helper.erl2
-rw-r--r--lib/elixir/test/erlang/tokenizer_test.erl6
-rw-r--r--lib/elixir/unicode/unicode.ex42
154 files changed, 4504 insertions, 4504 deletions
diff --git a/lib/elixir/include/elixir.hrl b/lib/elixir/include/elixir.hrl
index e52930c2f..243435d97 100644
--- a/lib/elixir/include/elixir.hrl
+++ b/lib/elixir/include/elixir.hrl
@@ -1,8 +1,8 @@
-define(wrap_call(Line, Module, Function, Args),
- { call, Line,
- { remote, Line, { atom, Line, Module }, { atom, Line, Function } },
+ {call, Line,
+ {remote, Line, {atom, Line, Module}, {atom, Line, Function}},
Args
- }).
+ }).
-define(atom_concat(Atoms), list_to_atom(lists:concat(Atoms))).
-define(elixir_macro(Macro), list_to_atom(lists:concat(['MACRO-',Macro]))).
diff --git a/lib/elixir/lib/access.ex b/lib/elixir/lib/access.ex
index fdcde1332..aa331d6cc 100644
--- a/lib/elixir/lib/access.ex
+++ b/lib/elixir/lib/access.ex
@@ -14,11 +14,11 @@ defprotocol Access do
iex> keywords[:a]
1
- iex> map = %{ a: 1, b: 2 }
+ iex> map = %{a: 1, b: 2}
iex> map[:a]
1
- iex> star_ratings = %{ 1.0 => "★", 1.5 => "★☆", 2.0 => "★★" }
+ iex> star_ratings = %{1.0 => "★", 1.5 => "★☆", 2.0 => "★★"}
iex> star_ratings[1.5]
"★☆"
@@ -43,15 +43,15 @@ defimpl Access, for: List do
do_access(dict, key)
end
- defp do_access([{ key, value }|_], key), do: value
- defp do_access([{ _, _ }|t], key), do: access(t, key)
+ defp do_access([{key, value}|_], key), do: value
+ defp do_access([{_, _}|t], key), do: access(t, key)
defp do_access([], _key), do: nil
end
defimpl Access, for: Map do
def access(map, key) do
case :maps.find(key, map) do
- { :ok, value } -> value
+ {:ok, value} -> value
:error -> nil
end
end
diff --git a/lib/elixir/lib/application/behaviour.ex b/lib/elixir/lib/application/behaviour.ex
index ba55d54bf..751606742 100644
--- a/lib/elixir/lib/application/behaviour.ex
+++ b/lib/elixir/lib/application/behaviour.ex
@@ -16,8 +16,8 @@ defmodule Application.Behaviour do
There are two callbacks which must be implemented:
- 1. `start(type, args)` - must return `{ :ok, pid }` or
- `{ :ok, pid, state }`, where `pid` is the process identifier
+ 1. `start(type, args)` - must return `{:ok, pid}` or
+ `{:ok, pid, state}`, where `pid` is the process identifier
of the supervisor tree root and `state` is application defined
state information;
diff --git a/lib/elixir/lib/base.ex b/lib/elixir/lib/base.ex
index e3a0b1e1c..0697371cb 100644
--- a/lib/elixir/lib/base.ex
+++ b/lib/elixir/lib/base.ex
@@ -15,11 +15,11 @@ defmodule Base do
b32_alphabet = Enum.with_index 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'
b32hex_alphabet = Enum.with_index '0123456789ABCDEFGHIJKLMNOPQRSTUV'
- Enum.each [ { :enc16, :dec16, b16_alphabet },
- { :enc64, :dec64, b64_alphabet },
- { :enc32, :dec32, b32_alphabet },
- { :enc64url, :dec64url, b64url_alphabet },
- { :enc32hex, :dec32hex, b32hex_alphabet } ], fn({enc, dec, alphabet}) ->
+ Enum.each [ {:enc16, :dec16, b16_alphabet},
+ {:enc64, :dec64, b64_alphabet},
+ {:enc32, :dec32, b32_alphabet},
+ {:enc64url, :dec64url, b64url_alphabet},
+ {:enc32hex, :dec32hex, b32hex_alphabet} ], fn({enc, dec, alphabet}) ->
for {encoding, value} <- alphabet do
defp unquote(enc)(unquote(value)), do: unquote(encoding)
defp unquote(dec)(unquote(encoding)), do: unquote(value)
@@ -61,9 +61,9 @@ defmodule Base do
{:ok, "foobar"}
"""
- @spec decode16(binary) :: { :ok, binary } | :error
+ @spec decode16(binary) :: {:ok, binary} | :error
def decode16(string) when is_binary(string) do
- { :ok, decode16!(string) }
+ {:ok, decode16!(string)}
rescue
ArgumentError -> :error
end
@@ -130,9 +130,9 @@ defmodule Base do
{:ok, "foobar"}
"""
- @spec decode64(binary) :: { :ok, binary } | :error
+ @spec decode64(binary) :: {:ok, binary} | :error
def decode64(string) when is_binary(string) do
- { :ok, do_decode64(string, &dec64/1) }
+ {:ok, do_decode64(string, &dec64/1)}
rescue
ArgumentError -> :error
end
@@ -203,9 +203,9 @@ defmodule Base do
{:ok, <<255,127,254,252>>}
"""
- @spec url_decode64(binary) :: { :ok, binary } | :error
+ @spec url_decode64(binary) :: {:ok, binary} | :error
def url_decode64(string) when is_binary(string) do
- { :ok, do_decode64(string, &dec64url/1) }
+ {:ok, do_decode64(string, &dec64url/1)}
rescue
ArgumentError -> :error
end
@@ -265,9 +265,9 @@ defmodule Base do
{:ok, "foobar"}
"""
- @spec decode32(binary) :: { :ok, binary } | :error
+ @spec decode32(binary) :: {:ok, binary} | :error
def decode32(string) do
- { :ok, do_decode32(string, &dec32/1) }
+ {:ok, do_decode32(string, &dec32/1)}
rescue
ArgumentError -> :error
end
@@ -328,9 +328,9 @@ defmodule Base do
{:ok, "foobar"}
"""
- @spec hex_decode32(binary) :: { :ok, binary } | :error
+ @spec hex_decode32(binary) :: {:ok, binary} | :error
def hex_decode32(string) when is_binary(string) do
- { :ok, do_decode32(string, &dec32hex/1) }
+ {:ok, do_decode32(string, &dec32hex/1)}
rescue
ArgumentError -> :error
end
diff --git a/lib/elixir/lib/behaviour.ex b/lib/elixir/lib/behaviour.ex
index 8e6bcd851..7f43c8f68 100644
--- a/lib/elixir/lib/behaviour.ex
+++ b/lib/elixir/lib/behaviour.ex
@@ -57,34 +57,34 @@ defmodule Behaviour do
do_defmacrocallback(split_spec(spec, quote(do: Macro.t)), __CALLER__)
end
- defp split_spec({ :when, _, [{ :::, _, [spec, return] }, guard] }, _default) do
- { spec, return, guard }
+ defp split_spec({:when, _, [{:::, _, [spec, return]}, guard]}, _default) do
+ {spec, return, guard}
end
- defp split_spec({ :when, _, [spec, guard] }, default) do
- { spec, default, guard }
+ defp split_spec({:when, _, [spec, guard]}, default) do
+ {spec, default, guard}
end
- defp split_spec({ :::, _, [spec, return] }, _default) do
- { spec, return, [] }
+ defp split_spec({:::, _, [spec, return]}, _default) do
+ {spec, return, []}
end
defp split_spec(spec, default) do
- { spec, default, [] }
+ {spec, default, []}
end
- defp do_defcallback({ spec, return, guards }, caller) do
+ defp do_defcallback({spec, return, guards}, caller) do
case Macro.decompose_call(spec) do
- { name, args } ->
+ {name, args} ->
do_callback(:def, name, args, name, length(args), args, return, guards, caller)
_ ->
raise ArgumentError, message: "invalid syntax in defcallback #{Macro.to_string(spec)}"
end
end
- defp do_defmacrocallback({ spec, return, guards }, caller) do
+ defp do_defmacrocallback({spec, return, guards}, caller) do
case Macro.decompose_call(spec) do
- { name, args } ->
+ {name, args} ->
do_callback(:defmacro, :"MACRO-#{name}", [quote(do: env :: Macro.Env.t)|args],
name, length(args), args, return, guards, caller)
_ ->
@@ -94,7 +94,7 @@ defmodule Behaviour do
defp do_callback(kind, name, args, docs_name, docs_arity, _docs_args, return, guards, caller) do
Enum.each args, fn
- { :::, _, [left, right] } ->
+ {:::, _, [left, right]} ->
ensure_not_default(left)
ensure_not_default(right)
left
@@ -110,7 +110,7 @@ defmodule Behaviour do
end
end
- defp ensure_not_default({ :\\, _, [_, _] }) do
+ defp ensure_not_default({:\\, _, [_, _]}) do
raise ArgumentError, message: "default arguments \\\\ not supported in defcallback/defmacrocallback"
end
@@ -120,7 +120,7 @@ defmodule Behaviour do
def store_docs(module, line, kind, name, arity) do
doc = Module.get_attribute module, :doc
Module.delete_attribute module, :doc
- Module.put_attribute module, :behaviour_docs, { { name, arity }, line, kind, doc }
+ Module.put_attribute module, :behaviour_docs, {{name, arity}, line, kind, doc}
end
@doc false
diff --git a/lib/elixir/lib/code.ex b/lib/elixir/lib/code.ex
index 8e7790c4a..8949de3ef 100644
--- a/lib/elixir/lib/code.ex
+++ b/lib/elixir/lib/code.ex
@@ -28,7 +28,7 @@ defmodule Code do
allowing them to be required again.
"""
def unload_files(files) do
- :elixir_code_server.cast { :unload_files, files }
+ :elixir_code_server.cast {:unload_files, files}
end
@doc """
@@ -88,7 +88,7 @@ defmodule Code do
`:macros` will no longer auto-import `Kernel` macros like `if`, `case`,
etc.
- Returns a tuple of the form `{ value, binding }`,
+ Returns a tuple of the form `{value, binding}`,
where `value` is the the value returned from evaluating `string`.
If an error occurs while evaluating `string` an exception will be raised.
@@ -118,14 +118,14 @@ defmodule Code do
def eval_string(string, binding \\ [], opts \\ [])
def eval_string(string, binding, Macro.Env[] = env) do
- { value, binding, _env, _scope } = :elixir.eval to_char_list(string), binding, env.to_keywords
- { value, binding }
+ {value, binding, _env, _scope} = :elixir.eval to_char_list(string), binding, env.to_keywords
+ {value, binding}
end
def eval_string(string, binding, opts) when is_list(opts) do
validate_eval_opts(opts)
- { value, binding, _env, _scope } = :elixir.eval to_char_list(string), binding, opts
- { value, binding }
+ {value, binding, _env, _scope} = :elixir.eval to_char_list(string), binding, opts
+ {value, binding}
end
@doc """
@@ -150,14 +150,14 @@ defmodule Code do
def eval_quoted(quoted, binding \\ [], opts \\ [])
def eval_quoted(quoted, binding, Macro.Env[] = env) do
- { value, binding, _env, _scope } = :elixir.eval_quoted quoted, binding, env.to_keywords
- { value, binding }
+ {value, binding, _env, _scope} = :elixir.eval_quoted quoted, binding, env.to_keywords
+ {value, binding}
end
def eval_quoted(quoted, binding, opts) when is_list(opts) do
validate_eval_opts(opts)
- { value, binding, _env, _scope } = :elixir.eval_quoted quoted, binding, opts
- { value, binding }
+ {value, binding, _env, _scope} = :elixir.eval_quoted quoted, binding, opts
+ {value, binding}
end
defp validate_eval_opts(opts) do
@@ -176,32 +176,32 @@ defmodule Code do
end
defp validate_aliases(kind, aliases) do
- valid = is_list(aliases) and Enum.all?(aliases, fn { k, v } ->
+ valid = is_list(aliases) and Enum.all?(aliases, fn {k, v} ->
is_atom(k) and is_atom(v)
end)
unless valid do
- raise ArgumentError, message: "expected :#{kind} option given to eval in the format: [{ module, module }]"
+ raise ArgumentError, message: "expected :#{kind} option given to eval in the format: [{module, module}]"
end
end
defp validate_imports(kind, imports) do
- valid = is_list(imports) and Enum.all?(imports, fn { k, v } ->
- is_atom(k) and is_list(v) and Enum.all?(v, fn { name, arity } ->
+ valid = is_list(imports) and Enum.all?(imports, fn {k, v} ->
+ is_atom(k) and is_list(v) and Enum.all?(v, fn {name, arity} ->
is_atom(name) and is_integer(arity)
end)
end)
unless valid do
- raise ArgumentError, message: "expected :#{kind} option given to eval in the format: [{ module, [{ name, arity }] }]"
+ raise ArgumentError, message: "expected :#{kind} option given to eval in the format: [{module, [{name, arity}]}]"
end
end
@doc """
Convert the given string to its quoted form.
- Returns `{ :ok, quoted_form }`
- if it succeeds, `{ :error, { line, error, token } }` otherwise.
+ Returns `{:ok, quoted_form}`
+ if it succeeds, `{:error, {line, error, token}}` otherwise.
## Options
@@ -261,7 +261,7 @@ defmodule Code do
Accepts `relative_to` as an argument to tell where the file is located.
If the file was already required/loaded, loads it again.
- It returns a list of tuples `{ ModuleName, <<byte_code>> }`, one tuple for
+ It returns a list of tuples `{ModuleName, <<byte_code>>}`, one tuple for
each module defined in the file.
Notice that if `load_file` is invoked by different processes concurrently,
@@ -270,9 +270,9 @@ defmodule Code do
"""
def load_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
- :elixir_code_server.call { :acquire, file }
+ :elixir_code_server.call {:acquire, file}
loaded = :elixir_compiler.file file
- :elixir_code_server.cast { :loaded, file }
+ :elixir_code_server.cast {:loaded, file}
loaded
end
@@ -294,14 +294,14 @@ defmodule Code do
def require_file(file, relative_to \\ nil) when is_binary(file) do
file = find_file(file, relative_to)
- case :elixir_code_server.call({ :acquire, file }) do
+ case :elixir_code_server.call({:acquire, file}) do
:loaded ->
nil
- { :queued, ref } ->
- receive do { :elixir_code_server, ^ref, :loaded } -> nil end
+ {:queued, ref} ->
+ receive do {:elixir_code_server, ^ref, :loaded} -> nil end
:proceed ->
loaded = :elixir_compiler.file file
- :elixir_code_server.cast { :loaded, file }
+ :elixir_code_server.cast {:loaded, file}
loaded
end
end
@@ -336,7 +336,7 @@ defmodule Code do
"""
def compiler_options(opts) do
- :elixir_code_server.cast { :compiler_options, opts }
+ :elixir_code_server.cast {:compiler_options, opts}
end
@doc """
@@ -367,8 +367,8 @@ defmodule Code do
If the module is already loaded, this works as no-op. If the module
was not yet loaded, it tries to load it.
- If it succeeds loading the module, it returns `{ :module, module }`.
- If not, returns `{ :error, reason }` with the error reason.
+ If it succeeds loading the module, it returns `{:module, module}`.
+ If not, returns `{:error, reason}` with the error reason.
## Code loading on the Erlang VM
@@ -411,7 +411,7 @@ defmodule Code do
otherwise.
"""
def ensure_loaded?(module) do
- match?({ :module, ^module }, ensure_loaded(module))
+ match?({:module, ^module}, ensure_loaded(module))
end
@doc """
@@ -421,21 +421,21 @@ defmodule Code do
not loaded yet, it checks if it needs to be compiled first and then
tries to load it.
- If it succeeds loading the module, it returns `{ :module, module }`.
- If not, returns `{ :error, reason }` with the error reason.
+ If it succeeds loading the module, it returns `{:module, module}`.
+ If not, returns `{:error, reason}` with the error reason.
Check `ensure_loaded/1` for more information on module loading
and when to use `ensure_loaded/1` or `ensure_compiled/1`.
"""
def ensure_compiled(module) when is_atom(module) do
case :code.ensure_loaded(module) do
- { :error, :nofile } = error ->
+ {:error, :nofile} = error ->
case :erlang.get(:elixir_ensure_compiled) do
:undefined -> error
_ ->
try do
module.__info__(:module)
- { :module, module }
+ {:module, module}
rescue
UndefinedFunctionError -> error
end
@@ -452,7 +452,7 @@ defmodule Code do
Returns `false` otherwise.
"""
def ensure_compiled?(module) do
- match?({ :module, ^module }, ensure_compiled(module))
+ match?({:module, ^module}, ensure_compiled(module))
end
## Helpers
diff --git a/lib/elixir/lib/collectable.ex b/lib/elixir/lib/collectable.ex
index 7efad015c..426f290a4 100644
--- a/lib/elixir/lib/collectable.ex
+++ b/lib/elixir/lib/collectable.ex
@@ -11,7 +11,7 @@ defprotocol Collectable do
If a collection implements both `Enumerable` and `Collectable`, both
operations can be combined with `Enum.traverse/2`:
- iex> Enum.traverse(%{ a: 1, b: 2 }, fn { k, v } -> { k, v * 2 } end)
+ iex> Enum.traverse(%{a: 1, b: 2}, fn {k, v} -> {k, v * 2} end)
%{a: 2, b: 4}
## Why Collectable?
@@ -38,7 +38,7 @@ defprotocol Collectable do
and `empty/1`, one can, for example, implement a traversal mechanism.
"""
- @type command :: { :cont, term } | :done | :halt
+ @type command :: {:cont, term} | :done | :halt
@doc """
Receives a collectable structure and returns an empty one.
@@ -51,7 +51,7 @@ defprotocol Collectable do
the initial accumulation value.
The returned function receives a collectable and injects a given
- value into it for every `{ :cont, term }` instruction.
+ value into it for every `{:cont, term}` instruction.
`:done` is passed when no further values will be injected, useful
for closing resources and normalizing values. A collectable must
@@ -60,7 +60,7 @@ defprotocol Collectable do
If injection is suddenly interrupted, `:halt` is passed and it can
return any value, as it won't be used.
"""
- @spec into(t) :: { term, (term, command -> t | term) }
+ @spec into(t) :: {term, (term, command -> t | term)}
def into(collectable)
end
@@ -70,11 +70,11 @@ defimpl Collectable, for: List do
end
def into(original) do
- { [], fn
- list, { :cont, x } -> [x|list]
+ {[], fn
+ list, {:cont, x} -> [x|list]
list, :done -> original ++ :lists.reverse(list)
_, :halt -> :ok
- end }
+ end}
end
end
@@ -84,11 +84,11 @@ defimpl Collectable, for: BitString do
end
def into(original) do
- { original, fn
- bitstring, { :cont, x } -> <<bitstring :: bits, x :: bits>>
+ {original, fn
+ bitstring, {:cont, x} -> <<bitstring :: bits, x :: bits>>
bitstring, :done -> bitstring
_, :halt -> :ok
- end }
+ end}
end
end
@@ -98,7 +98,7 @@ defimpl Collectable, for: Function do
end
def into(function) do
- { function, function }
+ {function, function}
end
end
@@ -108,10 +108,10 @@ defimpl Collectable, for: Map do
end
def into(original) do
- { original, fn
- map, { :cont, { k, v } } -> :maps.put(k, v, map)
+ {original, fn
+ map, {:cont, {k, v}} -> :maps.put(k, v, map)
map, :done -> map
_, :halt -> :ok
- end }
+ end}
end
end
diff --git a/lib/elixir/lib/dict.ex b/lib/elixir/lib/dict.ex
index 2b7f3e12a..241e36c7b 100644
--- a/lib/elixir/lib/dict.ex
+++ b/lib/elixir/lib/dict.ex
@@ -45,7 +45,7 @@ defmodule Dict do
defcallback equal?(t, t) :: boolean
defcallback get(t, key) :: value
defcallback get(t, key, value) :: value
- defcallback fetch(t, key) :: { :ok, value } | :error
+ defcallback fetch(t, key) :: {:ok, value} | :error
defcallback fetch!(t, key) :: value | no_return
defcallback has_key?(t, key) :: boolean
defcallback keys(t) :: [key]
@@ -164,14 +164,14 @@ defmodule Dict do
end
@doc """
- Returns `{ :ok, value }` associated with `key` in `dict`.
+ Returns `{:ok, value}` associated with `key` in `dict`.
If `dict` does not contain `key`, returns `:error`.
## Examples
iex> d = Enum.into([a: 1], dict_impl.new)
iex> Dict.fetch(d, :a)
- { :ok, 1 }
+ {:ok, 1}
iex> Dict.fetch(d, :b)
:error
@@ -287,8 +287,8 @@ defmodule Dict do
if target1 == target2 do
target1.merge(dict1, dict2, fun)
else
- Enumerable.reduce(dict2, { :cont, dict1 }, fn({ k, v }, acc) ->
- { :cont, target1.update(acc, k, v, fn(other) -> fun.(k, other, v) end) }
+ Enumerable.reduce(dict2, {:cont, dict1}, fn({k, v}, acc) ->
+ {:cont, target1.update(acc, k, v, fn(other) -> fun.(k, other, v) end)}
end) |> elem(1)
end
end
@@ -365,19 +365,19 @@ defmodule Dict do
## Examples
iex> d = Enum.into([a: 1, b: 2, c: 3, d: 4], dict_impl.new)
- iex> { d1, d2 } = Dict.split(d, [:a, :c, :e])
- iex> { Dict.to_list(d1) |> Enum.sort, Dict.to_list(d2) |> Enum.sort }
- { [a: 1, c: 3], [b: 2, d: 4] }
+ iex> {d1, d2} = Dict.split(d, [:a, :c, :e])
+ iex> {Dict.to_list(d1) |> Enum.sort, Dict.to_list(d2) |> Enum.sort}
+ {[a: 1, c: 3], [b: 2, d: 4]}
iex> d = Enum.into([], dict_impl.new)
- iex> { d1, d2 } = Dict.split(d, [:a, :c])
- iex> { Dict.to_list(d1), Dict.to_list(d2) }
- { [], [] }
+ iex> {d1, d2} = Dict.split(d, [:a, :c])
+ iex> {Dict.to_list(d1), Dict.to_list(d2)}
+ {[], []}
iex> d = Enum.into([a: 1, b: 2], dict_impl.new)
- iex> { d1, d2 } = Dict.split(d, [:a, :b, :c])
- iex> { Dict.to_list(d1) |> Enum.sort, Dict.to_list(d2) }
- { [a: 1, b: 2], [] }
+ iex> {d1, d2} = Dict.split(d, [:a, :b, :c])
+ iex> {Dict.to_list(d1) |> Enum.sort, Dict.to_list(d2)}
+ {[a: 1, b: 2], []}
"""
@spec split(t, [key]) :: {t, t}
@@ -464,10 +464,10 @@ defmodule Dict do
target1.equal?(dict1, dict2)
target1.size(dict1) == target2.size(dict2) ->
- Enumerable.reduce(dict2, { :cont, true }, fn({ k, v }, _acc) ->
+ Enumerable.reduce(dict2, {:cont, true}, fn({k, v}, _acc) ->
case target1.fetch(dict1, k) do
- { :ok, ^v } -> { :cont, true }
- _ -> { :halt, false }
+ {:ok, ^v} -> {:cont, true}
+ _ -> {:halt, false}
end
end) |> elem(1)
diff --git a/lib/elixir/lib/dict/behaviour.ex b/lib/elixir/lib/dict/behaviour.ex
index 4495c44c9..243c8d730 100644
--- a/lib/elixir/lib/dict/behaviour.ex
+++ b/lib/elixir/lib/dict/behaviour.ex
@@ -71,20 +71,20 @@ defmodule Dict.Behaviour do
def get(dict, key, default \\ nil) do
case fetch(dict, key) do
- { :ok, value } -> value
+ {:ok, value} -> value
:error -> default
end
end
def fetch!(dict, key) do
case fetch(dict, key) do
- { :ok, value } -> value
+ {:ok, value} -> value
:error -> raise KeyError, key: key, term: dict
end
end
def has_key?(dict, key) do
- match? { :ok, _ }, fetch(dict, key)
+ match? {:ok, _}, fetch(dict, key)
end
def put_new(dict, key, value) do
@@ -98,27 +98,27 @@ defmodule Dict.Behaviour do
def take(dict, keys) do
Enum.reduce keys, new, fn key, acc ->
case fetch(dict, key) do
- { :ok, value } -> put(acc, key, value)
+ {:ok, value} -> put(acc, key, value)
:error -> acc
end
end
end
def to_list(dict) do
- reduce(dict, { :cont, [] }, fn
- kv, acc -> { :cont, [kv|acc] }
+ reduce(dict, {:cont, []}, fn
+ kv, acc -> {:cont, [kv|acc]}
end) |> elem(1) |> :lists.reverse
end
def keys(dict) do
- reduce(dict, { :cont, [] }, fn
- {k, _}, acc -> { :cont, [k|acc] }
+ reduce(dict, {:cont, []}, fn
+ {k, _}, acc -> {:cont, [k|acc]}
end) |> elem(1) |> :lists.reverse
end
def values(dict) do
- reduce(dict, { :cont, [] }, fn
- {_, v}, acc -> { :cont, [v|acc] }
+ reduce(dict, {:cont, []}, fn
+ {_, v}, acc -> {:cont, [v|acc]}
end) |> elem(1) |> :lists.reverse
end
@@ -129,18 +129,18 @@ defmodule Dict.Behaviour do
case size(dict1) == size(dict2) do
false -> false
true ->
- reduce(dict1, { :cont, true }, fn({ k, v }, _acc) ->
+ reduce(dict1, {:cont, true}, fn({k, v}, _acc) ->
case fetch(dict2, k) do
- { :ok, ^v } -> { :cont, true }
- _ -> { :halt, false }
+ {:ok, ^v} -> {:cont, true}
+ _ -> {:halt, false}
end
end) |> elem(1)
end
end
def merge(dict1, dict2, fun \\ fn(_k, _v1, v2) -> v2 end) do
- reduce(dict1, { :cont, dict2 }, fn { k, v1 }, acc ->
- { :cont, update(acc, k, v1, &fun.(k, v1, &1)) }
+ reduce(dict1, {:cont, dict2}, fn {k, v1}, acc ->
+ {:cont, update(acc, k, v1, &fun.(k, v1, &1))}
end) |> elem(1)
end
diff --git a/lib/elixir/lib/enum.ex b/lib/elixir/lib/enum.ex
index 7923845cd..22cd4d1c3 100644
--- a/lib/elixir/lib/enum.ex
+++ b/lib/elixir/lib/enum.ex
@@ -16,8 +16,8 @@ defprotocol Enumerable do
Internally, `Enum.map/2` is implemented as follows:
def map(enum, fun) do
- reducer = fn x, acc -> { :cont, [fun.(x)|acc] } end
- Enumerable.reduce(enum, { :cont, [] }, reducer) |> elem(1) |> :lists.reverse()
+ reducer = fn x, acc -> {:cont, [fun.(x)|acc]} end
+ Enumerable.reduce(enum, {:cont, []}, reducer) |> elem(1) |> :lists.reverse()
end
Notice the user given function is wrapped into a `reducer` function.
@@ -50,7 +50,7 @@ defprotocol Enumerable do
In case a reducer function returns a `:suspend` accumulator,
it must be explicitly handled by the caller and never leak.
"""
- @type acc :: { :cont, term } | { :halt, term } | { :suspend, term }
+ @type acc :: {:cont, term} | {:halt, term} | {:suspend, term}
@typedoc """
The reducer function.
@@ -76,7 +76,7 @@ defprotocol Enumerable do
Furthermore, a `:suspend` call must always be followed by another call,
eventually halting or continuing until the end.
"""
- @type result :: { :done, term } | { :halted, term } | { :suspended, term, continuation }
+ @type result :: {:done, term} | {:halted, term} | {:suspended, term, continuation}
@typedoc """
A partially applied reduce function.
@@ -102,10 +102,10 @@ defprotocol Enumerable do
As an example, here is the implementation of `reduce` for lists:
- def reduce(_, { :halt, acc }, _fun), do: { :halted, acc }
- def reduce(list, { :suspend, acc }, fun), do: { :suspended, acc, &reduce(list, &1, fun) }
- def reduce([], { :cont, acc }, _fun), do: { :done, acc }
- def reduce([h|t], { :cont, acc }, fun), do: reduce(t, fun.(h, acc), fun)
+ def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
+ def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)}
+ def reduce([], {:cont, acc}, _fun), do: {:done, acc}
+ def reduce([h|t], {:cont, acc}, fun), do: reduce(t, fun.(h, acc), fun)
"""
@spec reduce(t, acc, reducer) :: result
@@ -114,29 +114,29 @@ defprotocol Enumerable do
@doc """
Checks if a value exists within the collection.
- It should return `{ :ok, boolean }`.
+ It should return `{:ok, boolean}`.
- If `{ :error, __MODULE__ }` is returned a default algorithm using `reduce` and
+ If `{:error, __MODULE__}` is returned a default algorithm using `reduce` and
the match (`===`) operator is used. This algorithm runs in linear time.
Please force use of the default algorithm unless you can implement an
algorithm that is significantly faster.
"""
- @spec member?(t, term) :: { :ok, boolean } | { :error, module }
+ @spec member?(t, term) :: {:ok, boolean} | {:error, module}
def member?(collection, value)
@doc """
Retrieves the collection's size.
- It should return `{ :ok, size }`.
+ It should return `{:ok, size}`.
- If `{ :error, __MODULE__ }` is returned a default algorithm using `reduce` and
+ If `{:error, __MODULE__}` is returned a default algorithm using `reduce` and
the match (`===`) operator is used. This algorithm runs in linear time.
Please force use of the default algorithm unless you can implement an
algorithm that is significantly faster.
"""
- @spec count(t) :: { :ok, non_neg_integer } | { :error, module }
+ @spec count(t) :: {:ok, non_neg_integer} | {:error, module}
def count(collection)
end
@@ -151,10 +151,10 @@ defmodule Enum do
[2,4,6]
Some particular types, like dictionaries, yield a specific format on
- enumeration. For dicts, the argument is always a `{ key, value }` tuple:
+ enumeration. For dicts, the argument is always a `{key, value}` tuple:
iex> dict = %{a: 1, b: 2}
- iex> Enum.map(dict, fn { k, v } -> { k, v * 2 } end)
+ iex> Enum.map(dict, fn {k, v} -> {k, v * 2} end)
[a: 2, b: 4]
Note that the functions in the `Enum` module are eager: they always start
@@ -181,16 +181,16 @@ defmodule Enum do
require Stream.Reducers, as: R
defmacrop cont(_, entry, acc) do
- quote do: { :cont, [unquote(entry)|unquote(acc)] }
+ quote do: {:cont, [unquote(entry)|unquote(acc)]}
end
defmacrop acc(h, n, _) do
- quote do: { unquote(h), unquote(n) }
+ quote do: {unquote(h), unquote(n)}
end
defmacrop cont_with_acc(f, entry, h, n, _) do
quote do
- { :cont, { [unquote(entry)|unquote(h)], unquote(n) } }
+ {:cont, {[unquote(entry)|unquote(h)], unquote(n)}}
end
end
@@ -226,8 +226,8 @@ defmodule Enum do
end
def all?(collection, fun) do
- Enumerable.reduce(collection, { :cont, true }, fn(entry, _) ->
- if fun.(entry), do: { :cont, true }, else: { :halt, false }
+ Enumerable.reduce(collection, {:cont, true}, fn(entry, _) ->
+ if fun.(entry), do: {:cont, true}, else: {:halt, false}
end) |> elem(1)
end
@@ -263,8 +263,8 @@ defmodule Enum do
end
def any?(collection, fun) do
- Enumerable.reduce(collection, { :cont, false }, fn(entry, _) ->
- if fun.(entry), do: { :halt, true }, else: { :cont, false }
+ Enumerable.reduce(collection, {:cont, false}, fn(entry, _) ->
+ if fun.(entry), do: {:halt, true}, else: {:cont, false}
end) |> elem(1)
end
@@ -291,7 +291,7 @@ defmodule Enum do
@spec at(t, integer, default) :: element | default
def at(collection, n, default \\ nil) do
case fetch(collection, n) do
- { :ok, h } -> h
+ {:ok, h} -> h
:error -> default
end
end
@@ -335,8 +335,8 @@ defmodule Enum do
def chunk(coll, n, step, pad \\ nil) when n > 0 and step > 0 do
limit = :erlang.max(n, step)
- { _, { acc, { buffer, i } } } =
- Enumerable.reduce(coll, { :cont, { [], { [], 0 } } }, R.chunk(n, step, limit))
+ {_, {acc, {buffer, i}}} =
+ Enumerable.reduce(coll, {:cont, {[], {[], 0}}}, R.chunk(n, step, limit))
if nil?(pad) || i == 0 do
:lists.reverse(acc)
@@ -357,11 +357,11 @@ defmodule Enum do
"""
@spec chunk_by(t, (element -> any)) :: [list]
def chunk_by(coll, fun) do
- { _, { acc, res } } =
- Enumerable.reduce(coll, { :cont, { [], nil } }, R.chunk_by(fun))
+ {_, {acc, res}} =
+ Enumerable.reduce(coll, {:cont, {[], nil}}, R.chunk_by(fun))
case res do
- { buffer, _ } ->
+ {buffer, _} ->
:lists.reverse([:lists.reverse(buffer) | acc])
nil ->
[]
@@ -429,11 +429,11 @@ defmodule Enum do
def count(collection) do
case Enumerable.count(collection) do
- { :ok, value } when is_integer(value) ->
+ {:ok, value} when is_integer(value) ->
value
- { :error, module } ->
- module.reduce(collection, { :cont, 0 }, fn
- _, acc -> { :cont, acc + 1 }
+ {:error, module} ->
+ module.reduce(collection, {:cont, 0}, fn
+ _, acc -> {:cont, acc + 1}
end) |> elem(1)
end
end
@@ -450,8 +450,8 @@ defmodule Enum do
"""
@spec count(t, (element -> as_boolean(term))) :: non_neg_integer
def count(collection, fun) do
- Enumerable.reduce(collection, { :cont, 0 }, fn(entry, acc) ->
- { :cont, if(fun.(entry), do: acc + 1, else: acc) }
+ Enumerable.reduce(collection, {:cont, 0}, fn(entry, acc) ->
+ {:cont, if(fun.(entry), do: acc + 1, else: acc)}
end) |> elem(1)
end
@@ -512,8 +512,8 @@ defmodule Enum do
end
def drop_while(collection, fun) do
- { _, { res, _ } } =
- Enumerable.reduce(collection, { :cont, { [], true } }, R.drop_while(fun))
+ {_, {res, _}} =
+ Enumerable.reduce(collection, {:cont, {[], true}}, R.drop_while(fun))
:lists.reverse(res)
end
@@ -561,12 +561,12 @@ defmodule Enum do
end
def empty?(collection) do
- Enumerable.reduce(collection, { :cont, true }, fn(_, _) -> { :halt, false } end) |> elem(1)
+ Enumerable.reduce(collection, {:cont, true}, fn(_, _) -> {:halt, false} end) |> elem(1)
end
@doc """
Finds the element at the given index (zero-based).
- Returns `{ :ok, element }` if found, otherwise `:error`.
+ Returns `{:ok, element}` if found, otherwise `:error`.
A negative index can be passed, which means the collection is
enumerated once and the index is counted from the end (i.e.
@@ -575,33 +575,33 @@ defmodule Enum do
## Examples
iex> Enum.fetch([2, 4, 6], 0)
- { :ok, 2 }
+ {:ok, 2}
iex> Enum.fetch([2, 4, 6], 2)
- { :ok, 6 }
+ {:ok, 6}
iex> Enum.fetch([2, 4, 6], 4)
:error
"""
- @spec fetch(t, integer) :: { :ok, element } | :error
+ @spec fetch(t, integer) :: {:ok, element} | :error
def fetch(collection, n) when is_list(collection) and n >= 0 do
do_fetch(collection, n)
end
def fetch(collection, n) when n >= 0 do
res =
- Enumerable.reduce(collection, { :cont, 0 }, fn(entry, acc) ->
+ Enumerable.reduce(collection, {:cont, 0}, fn(entry, acc) ->
if acc == n do
- { :halt, entry }
+ {:halt, entry}
else
- { :cont, acc + 1 }
+ {:cont, acc + 1}
end
end)
case res do
- { :halted, entry } -> { :ok, entry }
- { :done, _ } -> :error
+ {:halted, entry} -> {:ok, entry}
+ {:done, _} -> :error
end
end
@@ -629,7 +629,7 @@ defmodule Enum do
@spec fetch!(t, integer) :: element | no_return
def fetch!(collection, n) do
case fetch(collection, n) do
- { :ok, h } -> h
+ {:ok, h} -> h
:error -> raise Enum.OutOfBoundsError
end
end
@@ -650,7 +650,7 @@ defmodule Enum do
end
def filter(collection, fun) do
- Enumerable.reduce(collection, { :cont, [] }, R.filter(fun))
+ Enumerable.reduce(collection, {:cont, []}, R.filter(fun))
|> elem(1) |> :lists.reverse
end
@@ -669,7 +669,7 @@ defmodule Enum do
end
def filter_map(collection, filter, mapper) do
- Enumerable.reduce(collection, { :cont, [] }, R.filter_map(filter, mapper))
+ Enumerable.reduce(collection, {:cont, []}, R.filter_map(filter, mapper))
|> elem(1) |> :lists.reverse
end
@@ -698,8 +698,8 @@ defmodule Enum do
end
def find(collection, ifnone, fun) do
- Enumerable.reduce(collection, { :cont, ifnone }, fn(entry, ifnone) ->
- if fun.(entry), do: { :halt, entry }, else: { :cont, ifnone }
+ Enumerable.reduce(collection, {:cont, ifnone}, fn(entry, ifnone) ->
+ if fun.(entry), do: {:halt, entry}, else: {:cont, ifnone}
end) |> elem(1)
end
@@ -725,9 +725,9 @@ defmodule Enum do
end
def find_value(collection, ifnone, fun) do
- Enumerable.reduce(collection, { :cont, ifnone }, fn(entry, ifnone) ->
+ Enumerable.reduce(collection, {:cont, ifnone}, fn(entry, ifnone) ->
fun_entry = fun.(entry)
- if fun_entry, do: { :halt, fun_entry }, else: { :cont, ifnone }
+ if fun_entry, do: {:halt, fun_entry}, else: {:cont, ifnone}
end) |> elem(1)
end
@@ -751,13 +751,13 @@ defmodule Enum do
def find_index(collection, fun) do
res =
- Enumerable.reduce(collection, { :cont, 0 }, fn(entry, acc) ->
- if fun.(entry), do: { :halt, acc }, else: { :cont, acc + 1 }
+ Enumerable.reduce(collection, {:cont, 0}, fn(entry, acc) ->
+ if fun.(entry), do: {:halt, acc}, else: {:cont, acc + 1}
end)
case res do
- { :halted, entry } -> entry
- { :done, _ } -> nil
+ {:halted, entry} -> entry
+ {:done, _} -> nil
end
end
@@ -796,26 +796,26 @@ defmodule Enum do
iex> enum = 1..100
iex> n = 3
iex> Enum.flat_map_reduce(enum, 0, fn i, acc ->
- ...> if acc < n, do: { [i], acc + 1 }, else: { :halt, acc }
+ ...> if acc < n, do: {[i], acc + 1}, else: {:halt, acc}
...> end)
- { [1,2,3], 3 }
+ {[1,2,3], 3}
"""
- @spec flat_map_reduce(t, acc, fun) :: { [any], any } when
- fun: (element, acc -> { t, acc } | { :halt, acc }),
+ @spec flat_map_reduce(t, acc, fun) :: {[any], any} when
+ fun: (element, acc -> {t, acc} | {:halt, acc}),
acc: any
def flat_map_reduce(collection, acc, fun) do
- { _, { list, acc } } =
- Enumerable.reduce(collection, { :cont, { [], acc } }, fn(entry, { list, acc }) ->
+ {_, {list, acc}} =
+ Enumerable.reduce(collection, {:cont, {[], acc}}, fn(entry, {list, acc}) ->
case fun.(entry, acc) do
- { :halt, acc } ->
- { :halt, { list, acc } }
- { entries, acc } ->
- { :cont, { reduce(entries, list, &[&1|&2]), acc } }
+ {:halt, acc} ->
+ {:halt, {list, acc}}
+ {entries, acc} ->
+ {:cont, {reduce(entries, list, &[&1|&2]), acc}}
end
end)
- { :lists.reverse(list), acc }
+ {:lists.reverse(list), acc}
end
@doc """
@@ -870,9 +870,9 @@ defmodule Enum do
end
def into(collection, collectable) do
- { initial, fun } = Collectable.into(collectable)
+ {initial, fun} = Collectable.into(collectable)
into(collection, initial, fun, fn x, acc ->
- fun.(acc, { :cont, x })
+ fun.(acc, {:cont, x})
end)
end
@@ -893,9 +893,9 @@ defmodule Enum do
end
def into(collection, collectable, transform) when is_function(transform, 1) do
- { initial, fun } = Collectable.into(collectable)
+ {initial, fun} = Collectable.into(collectable)
into(collection, initial, fun, fn x, acc ->
- fun.(acc, { :cont, transform.(x) })
+ fun.(acc, {:cont, transform.(x)})
end)
end
@@ -953,7 +953,7 @@ defmodule Enum do
iex> Enum.map([1, 2, 3], fn(x) -> x * 2 end)
[2, 4, 6]
- iex> Enum.map([a: 1, b: 2], fn({k, v}) -> { k, -v } end)
+ iex> Enum.map([a: 1, b: 2], fn({k, v}) -> {k, -v} end)
[a: -1, b: -2]
"""
@@ -963,7 +963,7 @@ defmodule Enum do
end
def map(collection, fun) do
- Enumerable.reduce(collection, { :cont, [] }, R.map(fun)) |> elem(1) |> :lists.reverse
+ Enumerable.reduce(collection, {:cont, []}, R.map(fun)) |> elem(1) |> :lists.reverse
end
@doc """
@@ -1002,13 +1002,13 @@ defmodule Enum do
the first element is the mapped collection and the second
one is the final accumulator.
- For dicts, the first tuple element must be a `{ key, value }`
+ For dicts, the first tuple element must be a `{key, value}`
tuple.
## Examples
- iex> Enum.map_reduce([1, 2, 3], 0, fn(x, acc) -> { x * 2, x + acc } end)
- { [2, 4, 6], 6 }
+ iex> Enum.map_reduce([1, 2, 3], 0, fn(x, acc) -> {x * 2, x + acc} end)
+ {[2, 4, 6], 6}
"""
@spec map_reduce(t, any, (element, any -> any)) :: any
@@ -1017,11 +1017,11 @@ defmodule Enum do
end
def map_reduce(collection, acc, fun) do
- { list, acc } = reduce(collection, { [], acc }, fn(entry, { list, acc }) ->
- { new_entry, acc } = fun.(entry, acc)
- { [new_entry|list], acc }
+ {list, acc} = reduce(collection, {[], acc}, fn(entry, {list, acc}) ->
+ {new_entry, acc} = fun.(entry, acc)
+ {[new_entry|list], acc}
end)
- { :lists.reverse(list), acc }
+ {:lists.reverse(list), acc}
end
@doc """
@@ -1051,9 +1051,9 @@ defmodule Enum do
"""
@spec max_by(t, (element -> any)) :: element | no_return
def max_by([h|t], fun) do
- reduce(t, { h, fun.(h) }, fn(entry, { _, fun_max } = old) ->
+ reduce(t, {h, fun.(h)}, fn(entry, {_, fun_max} = old) ->
fun_entry = fun.(entry)
- if(fun_entry > fun_max, do: { entry, fun_entry }, else: old)
+ if(fun_entry > fun_max, do: {entry, fun_entry}, else: old)
end) |> elem(0)
end
@@ -1064,16 +1064,16 @@ defmodule Enum do
def max_by(collection, fun) do
result =
reduce(collection, :first, fn
- entry, { _, fun_max } = old ->
+ entry, {_, fun_max} = old ->
fun_entry = fun.(entry)
- if(fun_entry > fun_max, do: { entry, fun_entry }, else: old)
+ if(fun_entry > fun_max, do: {entry, fun_entry}, else: old)
entry, :first ->
- { entry, fun.(entry) }
+ {entry, fun.(entry)}
end)
case result do
:first -> raise Enum.EmptyError
- { entry, _ } -> entry
+ {entry, _} -> entry
end
end
@@ -1100,12 +1100,12 @@ defmodule Enum do
def member?(collection, value) do
case Enumerable.member?(collection, value) do
- { :ok, value } when is_boolean(value) ->
+ {:ok, value} when is_boolean(value) ->
value
- { :error, module } ->
- module.reduce(collection, { :cont, false }, fn
- v, _ when v === value -> { :halt, true }
- _, _ -> { :cont, false }
+ {:error, module} ->
+ module.reduce(collection, {:cont, false}, fn
+ v, _ when v === value -> {:halt, true}
+ _, _ -> {:cont, false}
end) |> elem(1)
end
end
@@ -1137,9 +1137,9 @@ defmodule Enum do
"""
@spec min_by(t, (element -> any)) :: element | no_return
def min_by([h|t], fun) do
- reduce(t, { h, fun.(h) }, fn(entry, { _, fun_min } = old) ->
+ reduce(t, {h, fun.(h)}, fn(entry, {_, fun_min} = old) ->
fun_entry = fun.(entry)
- if(fun_entry < fun_min, do: { entry, fun_entry }, else: old)
+ if(fun_entry < fun_min, do: {entry, fun_entry}, else: old)
end) |> elem(0)
end
@@ -1150,16 +1150,16 @@ defmodule Enum do
def min_by(collection, fun) do
result =
reduce(collection, :first, fn
- entry, { _, fun_min } = old ->
+ entry, {_, fun_min} = old ->
fun_entry = fun.(entry)
- if(fun_entry < fun_min, do: { entry, fun_entry }, else: old)
+ if(fun_entry < fun_min, do: {entry, fun_entry}, else: old)
entry, :first ->
- { entry, fun.(entry) }
+ {entry, fun.(entry)}
end)
case result do
:first -> raise Enum.EmptyError
- { entry, _ } -> entry
+ {entry, _} -> entry
end
end
@@ -1187,21 +1187,21 @@ defmodule Enum do
## Examples
iex> Enum.partition([1, 2, 3], fn(x) -> rem(x, 2) == 0 end)
- { [2], [1,3] }
+ {[2], [1,3]}
"""
@spec partition(t, (element -> any)) :: {list, list}
def partition(collection, fun) do
- { acc1, acc2 } =
- reduce(collection, { [], [] }, fn(entry, { acc1, acc2 }) ->
+ {acc1, acc2} =
+ reduce(collection, {[], []}, fn(entry, {acc1, acc2}) ->
if fun.(entry) do
- { [entry|acc1], acc2 }
+ {[entry|acc1], acc2}
else
- { acc1, [entry|acc2] }
+ {acc1, [entry|acc2]}
end
end)
- { :lists.reverse(acc1), :lists.reverse(acc2) }
+ {:lists.reverse(acc1), :lists.reverse(acc2)}
end
@doc """
@@ -1215,7 +1215,7 @@ defmodule Enum do
## Examples
iex> Enum.group_by(~w{ant buffalo cat dingo}, &String.length/1)
- %{ 3 => ["cat", "ant"], 7 => ["buffalo"], 5 => ["dingo"] }
+ %{3 => ["cat", "ant"], 7 => ["buffalo"], 5 => ["dingo"]}
"""
@spec group_by(t, (element -> any)) :: HashDict
@@ -1242,8 +1242,8 @@ defmodule Enum do
end
def reduce(collection, acc, fun) do
- Enumerable.reduce(collection, { :cont, acc },
- fn x, acc -> { :cont, fun.(x, acc) } end) |> elem(1)
+ Enumerable.reduce(collection, {:cont, acc},
+ fn x, acc -> {:cont, fun.(x, acc)} end) |> elem(1)
end
@doc """
@@ -1269,16 +1269,16 @@ defmodule Enum do
def reduce(collection, fun) do
result =
- Enumerable.reduce(collection, { :cont, :first }, fn
+ Enumerable.reduce(collection, {:cont, :first}, fn
x, :first ->
- { :cont, { :acc, x } }
- x, { :acc, acc } ->
- { :cont, { :acc, fun.(x, acc) } }
+ {:cont, {:acc, x}}
+ x, {:acc, acc} ->
+ {:cont, {:acc, fun.(x, acc)}}
end) |> elem(1)
case result do
:first -> raise Enum.EmptyError
- { :acc, acc } -> acc
+ {:acc, acc} -> acc
end
end
@@ -1297,7 +1297,7 @@ defmodule Enum do
end
def reject(collection, fun) do
- Enumerable.reduce(collection, { :cont, [] }, R.reject(fun)) |> elem(1) |> :lists.reverse
+ Enumerable.reduce(collection, {:cont, []}, R.reject(fun)) |> elem(1) |> :lists.reverse
end
@doc """
@@ -1353,8 +1353,8 @@ defmodule Enum do
"""
@spec scan(t, (element, any -> any)) :: list
def scan(enum, fun) do
- { _, { res, _ } } =
- Enumerable.reduce(enum, { :cont, { [], :first } }, R.scan_2(fun))
+ {_, {res, _}} =
+ Enumerable.reduce(enum, {:cont, {[], :first}}, R.scan_2(fun))
:lists.reverse(res)
end
@@ -1371,8 +1371,8 @@ defmodule Enum do
"""
@spec scan(t, any, (element, any -> any)) :: list
def scan(enum, acc, fun) do
- { _, { res, _ } } =
- Enumerable.reduce(enum, { :cont, { [], acc } }, R.scan_3(fun))
+ {_, {res, _}} =
+ Enumerable.reduce(enum, {:cont, {[], acc}}, R.scan_3(fun))
:lists.reverse(res)
end
@@ -1398,7 +1398,7 @@ defmodule Enum do
@spec shuffle(t) :: list
def shuffle(collection) do
randomized = reduce(collection, [], fn x, acc ->
- [{ :random.uniform, x }|acc]
+ [{:random.uniform, x}|acc]
end)
unwrap(:lists.keysort(1, randomized), [])
end
@@ -1416,7 +1416,7 @@ defmodule Enum do
@spec slice(t, integer, non_neg_integer) :: list
def slice(coll, start, count) when start < 0 do
- { list, new_start } = enumerate_and_count(coll, start)
+ {list, new_start} = enumerate_and_count(coll, start)
if new_start >= 0, do: slice(list, new_start, count)
end
@@ -1425,13 +1425,13 @@ defmodule Enum do
end
def slice(coll, start, count) when start >= 0 and count > 0 do
- { start, _, list } = Enumerable.reduce(coll, { :cont, { start, count, [] } }, fn
- _entry, { start, count, _list } when start > 0 ->
- { :cont, { start-1, count, [] } }
- entry, { start, count, list } when count > 1 ->
- { :cont, { start, count-1, [entry|list] } }
- entry, { start, count, list } ->
- { :halt, { start, count, [entry|list] } }
+ {start, _, list} = Enumerable.reduce(coll, {:cont, {start, count, []}}, fn
+ _entry, {start, count, _list} when start > 0 ->
+ {:cont, {start-1, count, []}}
+ entry, {start, count, list} when count > 1 ->
+ {:cont, {start, count-1, [entry|list]}}
+ entry, {start, count, list} ->
+ {:halt, {start, count, [entry|list]}}
end) |> elem(1)
if start <= 0, do: :lists.reverse(list)
@@ -1439,8 +1439,8 @@ defmodule Enum do
def slice(coll, start, 0) do
res =
- Enumerable.reduce(coll, { :cont, start }, fn _, start ->
- if start > 0, do: { :cont, start-1 }, else: { :halt, [] }
+ Enumerable.reduce(coll, {:cont, start}, fn _, start ->
+ if start > 0, do: {:cont, start-1}, else: {:halt, []}
end) |> elem(1)
if is_list(res), do: res
end
@@ -1472,7 +1472,7 @@ defmodule Enum do
end
def slice(coll, first..last) do
- { list, count } = enumerate_and_count(coll, 0)
+ {list, count} = enumerate_and_count(coll, 0)
corr_first = if first >= 0, do: first, else: first + count
corr_last = if last >= 0, do: last, else: last + count
length = corr_last - corr_first + 1
@@ -1547,19 +1547,19 @@ defmodule Enum do
## Examples
iex> Enum.split([1, 2, 3], 2)
- { [1,2], [3] }
+ {[1,2], [3]}
iex> Enum.split([1, 2, 3], 10)
- { [1,2,3], [] }
+ {[1,2,3], []}
iex> Enum.split([1, 2, 3], 0)
- { [], [1,2,3] }
+ {[], [1,2,3]}
iex> Enum.split([1, 2, 3], -1)
- { [1,2], [3] }
+ {[1,2], [3]}
iex> Enum.split([1, 2, 3], -5)
- { [], [1,2,3] }
+ {[], [1,2,3]}
"""
@spec split(t, integer) :: {list, list}
@@ -1568,16 +1568,16 @@ defmodule Enum do
end
def split(collection, count) when count >= 0 do
- { _, list1, list2 } =
- reduce(collection, { count, [], [] }, fn(entry, { counter, acc1, acc2 }) ->
+ {_, list1, list2} =
+ reduce(collection, {count, [], []}, fn(entry, {counter, acc1, acc2}) ->
if counter > 0 do
- { counter - 1, [entry|acc1], acc2 }
+ {counter - 1, [entry|acc1], acc2}
else
- { counter, acc1, [entry|acc2] }
+ {counter, acc1, [entry|acc2]}
end
end)
- { :lists.reverse(list1), :lists.reverse(list2) }
+ {:lists.reverse(list1), :lists.reverse(list2)}
end
def split(collection, count) when count < 0 do
@@ -1590,7 +1590,7 @@ defmodule Enum do
## Examples
iex> Enum.split_while([1, 2, 3, 4], fn(x) -> x < 3 end)
- { [1, 2], [3, 4] }
+ {[1, 2], [3, 4]}
"""
@spec split_while(t, (element -> as_boolean(term))) :: {list, list}
@@ -1599,15 +1599,15 @@ defmodule Enum do
end
def split_while(collection, fun) do
- { list1, list2 } =
- reduce(collection, { [], [] }, fn
- entry, { acc1, [] } ->
- if(fun.(entry), do: { [entry|acc1], [] }, else: { acc1, [entry] })
- entry, { acc1, acc2 } ->
- { acc1, [entry|acc2] }
+ {list1, list2} =
+ reduce(collection, {[], []}, fn
+ entry, {acc1, []} ->
+ if(fun.(entry), do: {[entry|acc1], []}, else: {acc1, [entry]})
+ entry, {acc1, acc2} ->
+ {acc1, [entry|acc2]}
end)
- { :lists.reverse(list1), :lists.reverse(list2) }
+ {:lists.reverse(list1), :lists.reverse(list2)}
end
@doc """
@@ -1644,19 +1644,19 @@ defmodule Enum do
end
def take(collection, count) when count > 0 do
- { _, { res, _ } } =
- Enumerable.reduce(collection, { :cont, { [], count } }, fn(entry, { list, count }) ->
+ {_, {res, _}} =
+ Enumerable.reduce(collection, {:cont, {[], count}}, fn(entry, {list, count}) ->
if count > 1 do
- { :cont, { [entry|list], count - 1 } }
+ {:cont, {[entry|list], count - 1}}
else
- { :halt, { [entry|list], count } }
+ {:halt, {[entry|list], count}}
end
end)
:lists.reverse(res)
end
def take(collection, count) when count < 0 do
- Stream.take(collection, count).({ :cont, [] }, &{ :cont, [&1|&2] })
+ Stream.take(collection, count).({:cont, []}, &{:cont, [&1|&2]})
|> elem(1) |> :lists.reverse
end
@@ -1673,8 +1673,8 @@ defmodule Enum do
@spec take_every(t, integer) :: list
def take_every(_collection, 0), do: []
def take_every(collection, nth) do
- { _, { res, _ } } =
- Enumerable.reduce(collection, { :cont, { [], :first } }, R.take_every(nth))
+ {_, {res, _}} =
+ Enumerable.reduce(collection, {:cont, {[], :first}}, R.take_every(nth))
:lists.reverse(res)
end
@@ -1693,7 +1693,7 @@ defmodule Enum do
end
def take_while(collection, fun) do
- Enumerable.reduce(collection, { :cont, [] }, R.take_while(fun))
+ Enumerable.reduce(collection, {:cont, []}, R.take_while(fun))
|> elem(1) |> :lists.reverse
end
@@ -1723,7 +1723,7 @@ defmodule Enum do
## Examples
- iex> Enum.traverse(%{a: 1, b: 2}, fn { k, v } -> { k, v * 2 } end)
+ iex> Enum.traverse(%{a: 1, b: 2}, fn {k, v} -> {k, v * 2} end)
%{a: 2, b: 4}
"""
@@ -1757,8 +1757,8 @@ defmodule Enum do
end
def uniq(collection, fun) do
- { _, { list, _ } } =
- Enumerable.reduce(collection, { :cont, { [], [] } }, R.uniq(fun))
+ {_, {list, _}} =
+ Enumerable.reduce(collection, {:cont, {[], []}}, R.uniq(fun))
:lists.reverse(list)
end
@@ -1783,7 +1783,7 @@ defmodule Enum do
end
def zip(coll1, coll2) do
- Stream.zip(coll1, coll2).({ :cont, [] }, &{ :cont, [&1|&2] }) |> elem(1) |> :lists.reverse
+ Stream.zip(coll1, coll2).({:cont, []}, &{:cont, [&1|&2]}) |> elem(1) |> :lists.reverse
end
@doc """
@@ -1796,23 +1796,23 @@ defmodule Enum do
[{1,0},{2,1},{3,2}]
"""
- @spec with_index(t) :: list({ element, non_neg_integer })
+ @spec with_index(t) :: list({element, non_neg_integer})
def with_index(collection) do
map_reduce(collection, 0, fn x, acc ->
- { { x, acc }, acc + 1 }
+ {{x, acc}, acc + 1}
end) |> elem(0)
end
## Helpers
- @compile { :inline, to_string: 2 }
+ @compile {:inline, to_string: 2}
defp enumerate_and_count(collection, count) when is_list(collection) do
- { collection, length(collection) - abs(count) }
+ {collection, length(collection) - abs(count)}
end
defp enumerate_and_count(collection, count) do
- map_reduce(collection, -abs(count), fn(x, acc) -> { x, acc + 1 } end)
+ map_reduce(collection, -abs(count), fn(x, acc) -> {x, acc + 1} end)
end
defp to_string(mapper, entry) do
@@ -1854,7 +1854,7 @@ defmodule Enum do
## fetch
- defp do_fetch([h|_], 0), do: { :ok, h }
+ defp do_fetch([h|_], 0), do: {:ok, h}
defp do_fetch([_|t], n), do: do_fetch(t, n - 1)
defp do_fetch([], _), do: :error
@@ -1934,45 +1934,45 @@ defmodule Enum do
## sort
- defp sort_reducer(entry, { :split, y, x, r, rs, bool }, fun) do
+ defp sort_reducer(entry, {:split, y, x, r, rs, bool}, fun) do
cond do
fun.(y, entry) == bool ->
- { :split, entry, y, [x|r], rs, bool }
+ {:split, entry, y, [x|r], rs, bool}
fun.(x, entry) == bool ->
- { :split, y, entry, [x|r], rs, bool }
+ {:split, y, entry, [x|r], rs, bool}
r == [] ->
- { :split, y, x, [entry], rs, bool }
+ {:split, y, x, [entry], rs, bool}
true ->
- { :pivot, y, x, r, rs, entry, bool }
+ {:pivot, y, x, r, rs, entry, bool}
end
end
- defp sort_reducer(entry, { :pivot, y, x, r, rs, s, bool }, fun) do
+ defp sort_reducer(entry, {:pivot, y, x, r, rs, s, bool}, fun) do
cond do
fun.(y, entry) == bool ->
- { :pivot, entry, y, [x | r], rs, s, bool }
+ {:pivot, entry, y, [x | r], rs, s, bool}
fun.(x, entry) == bool ->
- { :pivot, y, entry, [x | r], rs, s, bool }
+ {:pivot, y, entry, [x | r], rs, s, bool}
fun.(s, entry) == bool ->
- { :split, entry, s, [], [[y, x | r] | rs], bool }
+ {:split, entry, s, [], [[y, x | r] | rs], bool}
true ->
- { :split, s, entry, [], [[y, x | r] | rs], bool }
+ {:split, s, entry, [], [[y, x | r] | rs], bool}
end
end
defp sort_reducer(entry, [x], fun) do
- { :split, entry, x, [], [], fun.(x, entry) }
+ {:split, entry, x, [], [], fun.(x, entry)}
end
defp sort_reducer(entry, acc, _fun) do
[entry|acc]
end
- defp sort_terminator({ :split, y, x, r, rs, bool }, fun) do
+ defp sort_terminator({:split, y, x, r, rs, bool}, fun) do
sort_merge([[y, x | r] | rs], fun, bool)
end
- defp sort_terminator({ :pivot, y, x, r, rs, s, bool }, fun) do
+ defp sort_terminator({:pivot, y, x, r, rs, s, bool}, fun) do
sort_merge([[s], [y, x | r] | rs], fun, bool)
end
@@ -2045,11 +2045,11 @@ defmodule Enum do
end
defp do_split(list, 0, acc) do
- { :lists.reverse(acc), list }
+ {:lists.reverse(acc), list}
end
defp do_split([], _, acc) do
- { :lists.reverse(acc), [] }
+ {:lists.reverse(acc), []}
end
defp do_split_reverse([h|t], counter, acc) when counter > 0 do
@@ -2057,11 +2057,11 @@ defmodule Enum do
end
defp do_split_reverse(list, 0, acc) do
- { :lists.reverse(list), acc }
+ {:lists.reverse(list), acc}
end
defp do_split_reverse([], _, acc) do
- { [], acc }
+ {[], acc}
end
## split_while
@@ -2070,12 +2070,12 @@ defmodule Enum do
if fun.(h) do
do_split_while(t, fun, [h|acc])
else
- { :lists.reverse(acc), [h|t] }
+ {:lists.reverse(acc), [h|t]}
end
end
defp do_split_while([], _, acc) do
- { :lists.reverse(acc), [] }
+ {:lists.reverse(acc), []}
end
## take
@@ -2123,7 +2123,7 @@ defmodule Enum do
## zip
defp do_zip([h1|next1], [h2|next2]) do
- [{ h1, h2 }|do_zip(next1, next2)]
+ [{h1, h2}|do_zip(next1, next2)]
end
defp do_zip(_, []), do: []
@@ -2149,15 +2149,15 @@ defmodule Enum do
end
defimpl Enumerable, for: List do
- def reduce(_, { :halt, acc }, _fun), do: { :halted, acc }
- def reduce(list, { :suspend, acc }, fun), do: { :suspended, acc, &reduce(list, &1, fun) }
- def reduce([], { :cont, acc }, _fun), do: { :done, acc }
- def reduce([h|t], { :cont, acc }, fun), do: reduce(t, fun.(h, acc), fun)
+ def reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
+ def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)}
+ def reduce([], {:cont, acc}, _fun), do: {:done, acc}
+ def reduce([h|t], {:cont, acc}, fun), do: reduce(t, fun.(h, acc), fun)
def member?(_list, _value),
- do: { :error, __MODULE__ }
+ do: {:error, __MODULE__}
def count(_list),
- do: { :error, __MODULE__ }
+ do: {:error, __MODULE__}
end
defimpl Enumerable, for: Map do
@@ -2165,21 +2165,21 @@ defimpl Enumerable, for: Map do
do_reduce(:maps.to_list(map), acc, fun)
end
- defp do_reduce(_, { :halt, acc }, _fun), do: { :halted, acc }
- defp do_reduce(list, { :suspend, acc }, fun), do: { :suspended, acc, &do_reduce(list, &1, fun) }
- defp do_reduce([], { :cont, acc }, _fun), do: { :done, acc }
- defp do_reduce([h|t], { :cont, acc }, fun), do: do_reduce(t, fun.(h, acc), fun)
+ defp do_reduce(_, {:halt, acc}, _fun), do: {:halted, acc}
+ defp do_reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &do_reduce(list, &1, fun)}
+ defp do_reduce([], {:cont, acc}, _fun), do: {:done, acc}
+ defp do_reduce([h|t], {:cont, acc}, fun), do: do_reduce(t, fun.(h, acc), fun)
- def member?(map, { key, value }) do
- { :ok, match?({ :ok, ^value }, :maps.find(key, map)) }
+ def member?(map, {key, value}) do
+ {:ok, match?({:ok, ^value}, :maps.find(key, map))}
end
def member?(_map, _other) do
- { :ok, false }
+ {:ok, false}
end
def count(map) do
- { :ok, map_size(map) }
+ {:ok, map_size(map)}
end
end
@@ -2187,7 +2187,7 @@ defimpl Enumerable, for: Function do
def reduce(function, acc, fun),
do: function.(acc, fun)
def member?(_function, _value),
- do: { :error, __MODULE__ }
+ do: {:error, __MODULE__}
def count(_function),
- do: { :error, __MODULE__ }
+ do: {:error, __MODULE__}
end
diff --git a/lib/elixir/lib/exception.ex b/lib/elixir/lib/exception.ex
index 37a509ab6..6092d99ef 100644
--- a/lib/elixir/lib/exception.ex
+++ b/lib/elixir/lib/exception.ex
@@ -63,7 +63,7 @@ defexception BadArityError, [function: nil, args: nil] do
fun = exception.function
args = exception.args
insp = Enum.map_join(args, ", ", &inspect/1)
- { :arity, arity } = :erlang.fun_info(fun, :arity)
+ {:arity, arity} = :erlang.fun_info(fun, :arity)
"#{inspect(fun)} with arity #{arity} called with #{count(length(args), insp)}"
end
@@ -174,8 +174,8 @@ defmodule Exception do
@type stacktrace :: [stacktrace_entry]
@type stacktrace_entry ::
- { module, function, arity_or_args, location } |
- { function, arity_or_args, location }
+ {module, function, arity_or_args, location} |
+ {function, arity_or_args, location}
@typep arity_or_args :: non_neg_integer | list
@typep location :: Keyword.t
@@ -207,41 +207,41 @@ defmodule Exception do
SystemLimitError[]
end
- defp normalize_error({ :badarity, { fun, args } }) do
+ defp normalize_error({:badarity, {fun, args}}) do
BadArityError[function: fun, args: args]
end
- defp normalize_error({ :badfun, term }) do
+ defp normalize_error({:badfun, term}) do
BadFunctionError[term: term]
end
- defp normalize_error({ :badstruct, struct, term }) do
+ defp normalize_error({:badstruct, struct, term}) do
BadStructError[struct: struct, term: term]
end
- defp normalize_error({ :badmatch, term }) do
+ defp normalize_error({:badmatch, term}) do
MatchError[term: term]
end
- defp normalize_error({ :case_clause, term }) do
+ defp normalize_error({:case_clause, term}) do
CaseClauseError[term: term]
end
- defp normalize_error({ :try_clause, term }) do
+ defp normalize_error({:try_clause, term}) do
TryClauseError[term: term]
end
defp normalize_error(:undef) do
- { mod, fun, arity } = from_stacktrace(:erlang.get_stacktrace)
+ {mod, fun, arity} = from_stacktrace(:erlang.get_stacktrace)
UndefinedFunctionError[module: mod, function: fun, arity: arity]
end
defp normalize_error(:function_clause) do
- { mod, fun, arity } = from_stacktrace(:erlang.get_stacktrace)
+ {mod, fun, arity} = from_stacktrace(:erlang.get_stacktrace)
FunctionClauseError[module: mod, function: fun, arity: arity]
end
- defp normalize_error({ :badarg, payload }) do
+ defp normalize_error({:badarg, payload}) do
ArgumentError[message: "argument error: #{inspect(payload)}"]
end
@@ -256,31 +256,31 @@ defmodule Exception do
def format_stacktrace_entry(entry)
# From Macro.Env.stacktrace
- def format_stacktrace_entry({ module, :__MODULE__, 0, location }) do
+ def format_stacktrace_entry({module, :__MODULE__, 0, location}) do
format_location(location) <> inspect(module) <> " (module)"
end
# From :elixir_compiler_*
- def format_stacktrace_entry({ _module, :__MODULE__, 1, location }) do
+ def format_stacktrace_entry({_module, :__MODULE__, 1, location}) do
format_location(location) <> "(module)"
end
# From :elixir_compiler_*
- def format_stacktrace_entry({ _module, :__FILE__, 1, location }) do
+ def format_stacktrace_entry({_module, :__FILE__, 1, location}) do
format_location(location) <> "(file)"
end
- def format_stacktrace_entry({ module, fun, arity, location }) do
+ def format_stacktrace_entry({module, fun, arity, location}) do
format_application(module) <> format_location(location) <> format_mfa(module, fun, arity)
end
- def format_stacktrace_entry({ fun, arity, location }) do
+ def format_stacktrace_entry({fun, arity, location}) do
format_location(location) <> format_fa(fun, arity)
end
defp format_application(module) do
case :application.get_application(module) do
- { :ok, app } -> "(" <> atom_to_binary(app) <> ") "
+ {:ok, app} -> "(" <> atom_to_binary(app) <> ") "
:undefined -> ""
end
end
@@ -396,15 +396,15 @@ defmodule Exception do
format_file_line Keyword.get(opts, :file), Keyword.get(opts, :line), " "
end
- defp from_stacktrace([{ module, function, args, _ }|_]) when is_list(args) do
- { module, function, length(args) }
+ defp from_stacktrace([{module, function, args, _}|_]) when is_list(args) do
+ {module, function, length(args)}
end
- defp from_stacktrace([{ module, function, arity, _ }|_]) do
- { module, function, arity }
+ defp from_stacktrace([{module, function, arity, _}|_]) do
+ {module, function, arity}
end
defp from_stacktrace(_) do
- { nil, nil, nil }
+ {nil, nil, nil}
end
end
diff --git a/lib/elixir/lib/file.ex b/lib/elixir/lib/file.ex
index 6440566e3..d895023cb 100644
--- a/lib/elixir/lib/file.ex
+++ b/lib/elixir/lib/file.ex
@@ -53,20 +53,20 @@ defmodule File.Stream do
stream
end
- def into(%{ path: path, modes: modes, raw: raw } = stream) do
+ def into(%{path: path, modes: modes, raw: raw} = stream) do
modes = for mode <- modes, not mode in [:read], do: mode
case :file.open(path, [:write|modes]) do
- { :ok, device } ->
- { :ok, into(device, stream, raw) }
- { :error, reason } ->
+ {:ok, device} ->
+ {:ok, into(device, stream, raw)}
+ {:error, reason} ->
raise File.Error, reason: reason, action: "stream", path: path
end
end
defp into(device, stream, raw) do
fn
- :ok, { :cont, x } ->
+ :ok, {:cont, x} ->
case raw do
true -> IO.binwrite(device, x)
false -> IO.write(device, x)
@@ -81,14 +81,14 @@ defmodule File.Stream do
end
defimpl Enumerable do
- def reduce(%{ path: path, modes: modes, line_or_bytes: line_or_bytes, raw: raw }, acc, fun) do
+ def reduce(%{path: path, modes: modes, line_or_bytes: line_or_bytes, raw: raw}, acc, fun) do
modes = for mode <- modes, not mode in [:write, :append], do: mode
start_fun =
fn ->
case :file.open(path, modes) do
- { :ok, device } -> device
- { :error, reason } ->
+ {:ok, device} -> device
+ {:error, reason} ->
raise File.Error, reason: reason, action: "stream", path: path
end
end
@@ -103,11 +103,11 @@ defmodule File.Stream do
end
def count(_stream) do
- { :error, __MODULE__ }
+ {:error, __MODULE__}
end
def member?(_stream, _term) do
- { :error, __MODULE__ }
+ {:error, __MODULE__}
end
end
end
@@ -143,17 +143,17 @@ defmodule File do
## API
Most of the functions in this module return `:ok` or
- `{ :ok, result }` in case of success, `{ :error, reason }`
+ `{:ok, result}` in case of success, `{:error, reason}`
otherwise. Those function are also followed by a variant
that ends with `!` which returns the result (without the
- `{ :ok, result }` tuple) in case of success or raises an
+ `{:ok, result}` tuple) in case of success or raises an
exception in case it fails. For example:
File.read("hello.txt")
- #=> { :ok, "World" }
+ #=> {:ok, "World"}
File.read("invalid.txt")
- #=> { :error, :enoent }
+ #=> {:error, :enoent}
File.read!("hello.txt")
#=> "World"
@@ -231,7 +231,7 @@ defmodule File do
"""
@spec exists?(Path.t) :: boolean
def exists?(path) do
- match?({ :ok, _ }, F.read_file_info(String.from_char_data!(path)))
+ match?({:ok, _}, F.read_file_info(String.from_char_data!(path)))
end
@doc """
@@ -260,7 +260,7 @@ defmodule File do
path = String.from_char_data!(path)
case mkdir(path) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "make directory", path: path
end
end
@@ -288,7 +288,7 @@ defmodule File do
path = String.from_char_data!(path)
case mkdir_p(path) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "make directory (with -p)", path: path
end
end
@@ -322,17 +322,17 @@ defmodule File do
def read!(path) do
path = String.from_char_data!(path)
case read(path) do
- { :ok, binary } ->
+ {:ok, binary} ->
binary
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "read file", path: path
end
end
@doc """
Returns information about the `path`. If it exists, it
- returns a `{ :ok, info }` tuple, where info is a
- `File.Stat` record. Returns `{ :error, reason }` with
+ returns a `{:ok, info}` tuple, where info is a
+ `File.Stat` record. Returns `{:error, reason}` with
the same reasons as `read/1` if a failure occurs.
## Options
@@ -343,11 +343,11 @@ defmodule File do
Default is `:local`.
"""
- @spec stat(Path.t, stat_options) :: {:ok, File.Stat.t } | {:error, posix}
+ @spec stat(Path.t, stat_options) :: {:ok, File.Stat.t} | {:error, posix}
def stat(path, opts \\ []) do
case F.read_file_info(String.from_char_data!(path), opts) do
{:ok, fileinfo} ->
- {:ok, set_elem(fileinfo, 0, File.Stat) }
+ {:ok, set_elem(fileinfo, 0, File.Stat)}
error ->
error
end
@@ -369,7 +369,7 @@ defmodule File do
@doc """
Writes the given `File.Stat` back to the filesystem at the given
- path. Returns `:ok` or `{ :error, reason }`.
+ path. Returns `:ok` or `{:error, reason}`.
"""
@spec write_stat(Path.t, File.Stat.t, stat_options) :: :ok | {:error, posix}
def write_stat(path, File.Stat[] = stat, opts \\ []) do
@@ -385,7 +385,7 @@ defmodule File do
path = String.from_char_data!(path)
case write_stat(path, stat, opts) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "write file stats", path: path
end
end
@@ -398,7 +398,7 @@ defmodule File do
def touch(path, time \\ :calendar.local_time) do
path = String.from_char_data!(path)
case F.change_time(path, time) do
- { :error, :enoent } ->
+ {:error, :enoent} ->
write(path, "")
F.change_time(path, time)
other ->
@@ -415,7 +415,7 @@ defmodule File do
path = String.from_char_data!(path)
case touch(path, time) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "touch", path: path
end
end
@@ -430,8 +430,8 @@ defmodule File do
If file `destination` already exists, it is overwritten
by the contents in `source`.
- Returns `{ :ok, bytes_copied }` if successful,
- `{ :error, reason }` otherwise.
+ Returns `{:ok, bytes_copied}` if successful,
+ `{:error, reason}` otherwise.
Compared to the `cp/3`, this function is more low-level,
allowing a copy from device to device limited by a number of
@@ -456,8 +456,8 @@ defmodule File do
source = String.from_char_data!(source)
destination = String.from_char_data!(destination)
case copy(source, destination, bytes_count) do
- { :ok, bytes_count } -> bytes_count
- { :error, reason } ->
+ {:ok, bytes_count} -> bytes_count
+ {:error, reason} ->
raise File.CopyError, reason: reason, action: "copy",
source: source, destination: destination
end
@@ -471,7 +471,7 @@ defmodule File do
should be overwritten, `false` otherwise. It defaults to return `true`.
It returns `:ok` in case of success, returns
- `{ :error, reason }` otherwise.
+ `{:error, reason}` otherwise.
If you want to copy contents from an io device to another device
or do a straight copy from a source to a destination without
@@ -488,7 +488,7 @@ defmodule File do
destination = String.from_char_data!(destination)
case do_cp_file(source, destination, callback, []) do
- { :error, reason, _ } -> { :error, reason }
+ {:error, reason, _} -> {:error, reason}
_ -> :ok
end
end
@@ -504,7 +504,7 @@ defmodule File do
case cp(source, destination, callback) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.CopyError, reason: reason, action: "copy recursively",
source: source, destination: destination
end
@@ -531,9 +531,9 @@ defmodule File do
directory in a dirty state, where already
copied files won't be removed.
- It returns `{ :ok, files_and_directories }` in case of
+ It returns `{:ok, files_and_directories}` in case of
success with all files and directories copied in no
- specific order, `{ :error, reason, file }` otherwise.
+ specific order, `{:error, reason, file}` otherwise.
Note: The command `cp` in Unix systems behaves differently
depending if `destination` is an existing directory or not.
@@ -559,8 +559,8 @@ defmodule File do
destination = String.from_char_data!(destination)
case do_cp_r(source, destination, callback, []) do
- { :error, _, _ } = error -> error
- res -> { :ok, res }
+ {:error, _, _} = error -> error
+ res -> {:ok, res}
end
end
@@ -574,8 +574,8 @@ defmodule File do
destination = String.from_char_data!(destination)
case cp_r(source, destination, callback) do
- { :ok, files } -> files
- { :error, reason, file } ->
+ {:ok, files} -> files
+ {:error, reason, file} ->
raise File.CopyError, reason: reason, action: "copy recursively",
source: source, destination: destination, on: file
end
@@ -585,27 +585,27 @@ defmodule File do
# a directory. Returns nil unless an error is found.
defp do_cp_r(src, dest, callback, acc) when is_list(acc) do
case :elixir_utils.file_type(src) do
- { :ok, :regular } ->
+ {:ok, :regular} ->
do_cp_file(src, dest, callback, acc)
- { :ok, :symlink } ->
+ {:ok, :symlink} ->
case F.read_link(src) do
- { :ok, link } -> do_cp_link(link, src, dest, callback, acc)
- { :error, reason } -> { :error, reason, src }
+ {:ok, link} -> do_cp_link(link, src, dest, callback, acc)
+ {:error, reason} -> {:error, reason, src}
end
- { :ok, :directory } ->
+ {:ok, :directory} ->
case F.list_dir(src) do
- { :ok, files } ->
+ {:ok, files} ->
case mkdir(dest) do
- success when success in [:ok, { :error, :eexist }] ->
+ success when success in [:ok, {:error, :eexist}] ->
Enum.reduce(files, [dest|acc], fn(x, acc) ->
do_cp_r(Path.join(src, x), Path.join(dest, x), callback, acc)
end)
- { :error, reason } -> { :error, reason, dest }
+ {:error, reason} -> {:error, reason, dest}
end
- { :error, reason } -> { :error, reason, src }
+ {:error, reason} -> {:error, reason, src}
end
- { :ok, _ } -> { :error, :eio, src }
- { :error, reason } -> { :error, reason, src }
+ {:ok, _} -> {:error, :eio, src}
+ {:error, reason} -> {:error, reason, src}
end
end
@@ -623,23 +623,23 @@ defmodule File do
# Both src and dest are files.
defp do_cp_file(src, dest, callback, acc) do
- case F.copy(src, { dest, [:exclusive] }) do
- { :ok, _ } ->
+ case F.copy(src, {dest, [:exclusive]}) do
+ {:ok, _} ->
copy_file_mode!(src, dest)
[dest|acc]
- { :error, :eexist } ->
+ {:error, :eexist} ->
if callback.(src, dest) do
rm(dest)
case copy(src, dest) do
- { :ok, _ } ->
+ {:ok, _} ->
copy_file_mode!(src, dest)
[dest|acc]
- { :error, reason } -> { :error, reason, src }
+ {:error, reason} -> {:error, reason, src}
end
else
acc
end
- { :error, reason } -> { :error, reason, src }
+ {:error, reason} -> {:error, reason, src}
end
end
@@ -648,17 +648,17 @@ defmodule File do
case F.make_symlink(link, dest) do
:ok ->
[dest|acc]
- { :error, :eexist } ->
+ {:error, :eexist} ->
if callback.(src, dest) do
rm(dest)
case F.make_symlink(link, dest) do
:ok -> [dest|acc]
- { :error, reason } -> { :error, reason, src }
+ {:error, reason} -> {:error, reason, src}
end
else
acc
end
- { :error, reason } -> { :error, reason, src }
+ {:error, reason} -> {:error, reason, src}
end
end
@@ -700,7 +700,7 @@ defmodule File do
path = String.from_char_data!(path)
case F.write_file(path, content, modes) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "write to file", path: path
end
end
@@ -740,7 +740,7 @@ defmodule File do
path = String.from_char_data!(path)
case rm(path) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "remove file", path: path
end
end
@@ -771,7 +771,7 @@ defmodule File do
path = String.from_char_data!(path)
case rmdir(path) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "remove directory", path: path
end
end
@@ -781,46 +781,46 @@ defmodule File do
Symlinks are not followed but simply removed, non-existing
files are simply ignored (i.e. doesn't make this function fail).
- Returns `{ :ok, files_and_directories }` with all files and
- directories removed in no specific order, `{ :error, reason, file }`
+ Returns `{:ok, files_and_directories}` with all files and
+ directories removed in no specific order, `{:error, reason, file}`
otherwise.
## Examples
File.rm_rf "samples"
- #=> { :ok, ["samples", "samples/1.txt"] }
+ #=> {:ok, ["samples", "samples/1.txt"]}
File.rm_rf "unknown"
- #=> { :ok, [] }
+ #=> {:ok, []}
"""
@spec rm_rf(Path.t) :: {:ok, [binary]} | {:error, posix, binary}
def rm_rf(path) do
- do_rm_rf(String.from_char_data!(path), { :ok, [] })
+ do_rm_rf(String.from_char_data!(path), {:ok, []})
end
- defp do_rm_rf(path, { :ok, _ } = entry) do
+ defp do_rm_rf(path, {:ok, _} = entry) do
case safe_list_dir(path) do
- { :ok, files } when is_list(files) ->
+ {:ok, files} when is_list(files) ->
res =
Enum.reduce files, entry, fn(file, tuple) ->
do_rm_rf(Path.join(path, file), tuple)
end
case res do
- { :ok, acc } ->
+ {:ok, acc} ->
case rmdir(path) do
- :ok -> { :ok, [path|acc] }
- { :error, :enoent } -> res
- { :error, reason } -> { :error, reason, path }
+ :ok -> {:ok, [path|acc]}
+ {:error, :enoent} -> res
+ {:error, reason} -> {:error, reason, path}
end
reason ->
reason
end
- { :ok, :directory } -> do_rm_directory(path, entry)
- { :ok, :regular } -> do_rm_regular(path, entry)
- { :error, reason } when reason in [:enoent, :enotdir] -> entry
- { :error, reason } -> { :error, reason, path }
+ {:ok, :directory} -> do_rm_directory(path, entry)
+ {:ok, :regular} -> do_rm_regular(path, entry)
+ {:error, reason} when reason in [:enoent, :enotdir] -> entry
+ {:error, reason} -> {:error, reason, path}
end
end
@@ -828,11 +828,11 @@ defmodule File do
reason
end
- defp do_rm_regular(path, { :ok, acc } = entry) do
+ defp do_rm_regular(path, {:ok, acc} = entry) do
case rm(path) do
- :ok -> { :ok, [path|acc] }
- { :error, :enoent } -> entry
- { :error, reason } -> { :error, reason, path }
+ :ok -> {:ok, [path|acc]}
+ {:error, :enoent} -> entry
+ {:error, reason} -> {:error, reason, path}
end
end
@@ -840,28 +840,28 @@ defmodule File do
# with rmdir/1. But on Unix, we remove them via rm/1. So we first try
# to remove it as a directory and, if we get :enotdir, we fallback to
# a file removal.
- defp do_rm_directory(path, { :ok, acc } = entry) do
+ defp do_rm_directory(path, {:ok, acc} = entry) do
case rmdir(path) do
- :ok -> { :ok, [path|acc] }
- { :error, :enotdir } -> do_rm_regular(path, entry)
- { :error, :enoent } -> entry
- { :error, reason } -> { :error, reason, path }
+ :ok -> {:ok, [path|acc]}
+ {:error, :enotdir} -> do_rm_regular(path, entry)
+ {:error, :enoent} -> entry
+ {:error, reason} -> {:error, reason, path}
end
end
defp safe_list_dir(path) do
case :elixir_utils.file_type(path) do
- { :ok, :symlink } ->
+ {:ok, :symlink} ->
case :elixir_utils.file_type(path, :read_file_info) do
- { :ok, :directory } -> { :ok, :directory }
- _ -> { :ok, :regular }
+ {:ok, :directory} -> {:ok, :directory}
+ _ -> {:ok, :regular}
end
- { :ok, :directory } ->
+ {:ok, :directory} ->
F.list_dir(path)
- { :ok, _ } ->
- { :ok, :regular }
- { :error, reason } ->
- { :error, reason }
+ {:ok, _} ->
+ {:ok, :regular}
+ {:error, reason} ->
+ {:error, reason}
end
end
@@ -873,8 +873,8 @@ defmodule File do
def rm_rf!(path) do
path = String.from_char_data!(path)
case rm_rf(path) do
- { :ok, files } -> files
- { :error, reason, _ } ->
+ {:ok, files} -> files
+ {:error, reason, _} ->
raise File.Error, reason: reason, path: path,
action: "remove files and directories recursively from"
end
@@ -901,7 +901,7 @@ defmodule File do
Every write operation to a file opened with append will take place at the end of the file.
* `:exclusive` - The file, when opened for writing, is created if it does not exist.
- If the file exists, open will return `{ :error, :eexist }`.
+ If the file exists, open will return `{:error, :eexist}`.
* `:char_list` - When this term is given, read operations on the file will return char lists rather than binaries;
@@ -921,18 +921,18 @@ defmodule File do
This function returns:
- * `{ :ok, io_device }` - The file has been opened in the requested mode.
+ * `{:ok, io_device}` - The file has been opened in the requested mode.
`io_device` is actually the pid of the process which handles the file.
This process is linked to the process which originally opened the file.
If any process to which the `io_device` is linked terminates, the file will
be closed and the process itself will be terminated. An `io_device` returned
from this call can be used as an argument to the `IO` module functions.
- * `{ :error, reason }` - The file could not be opened.
+ * `{:error, reason}` - The file could not be opened.
## Examples
- { :ok, file } = File.open("foo.tar.gz", [:read, :compressed])
+ {:ok, file} = File.open("foo.tar.gz", [:read, :compressed])
IO.read(file, :line)
File.close(file)
@@ -955,8 +955,8 @@ defmodule File do
automatically closed after the function returns, regardless
if there was an error or not.
- It returns `{ :ok, function_result }` in case of success,
- `{ :error, reason }` otherwise.
+ It returns `{:ok, function_result}` in case of success,
+ `{:error, reason}` otherwise.
Do not use this function with `:delayed_write` option
since automatically closing the file may fail
@@ -972,9 +972,9 @@ defmodule File do
@spec open(Path.t, list, (io_device -> res)) :: {:ok, res} | {:error, posix} when res: var
def open(path, modes, function) do
case open(path, modes) do
- { :ok, device } ->
+ {:ok, device} ->
try do
- { :ok, function.(device) }
+ {:ok, function.(device)}
after
:ok = close(device)
end
@@ -990,8 +990,8 @@ defmodule File do
def open!(path, modes \\ []) do
path = String.from_char_data!(path)
case open(path, modes) do
- { :ok, device } -> device
- { :error, reason } ->
+ {:ok, device} -> device
+ {:error, reason} ->
raise File.Error, reason: reason, action: "open", path: path
end
end
@@ -1004,8 +1004,8 @@ defmodule File do
def open!(path, modes, function) do
path = String.from_char_data!(path)
case open(path, modes, function) do
- { :ok, device } -> device
- { :error, reason } ->
+ {:ok, device} -> device
+ {:error, reason} ->
raise File.Error, reason: reason, action: "open", path: path
end
end
@@ -1015,14 +1015,14 @@ defmodule File do
In rare circumstances, this function can fail on Unix. It may happen
if read permission does not exist for the parent directories of the
- current directory. For this reason, returns `{ :ok, cwd }` in case
- of success, `{ :error, reason }` otherwise.
+ current directory. For this reason, returns `{:ok, cwd}` in case
+ of success, `{:error, reason}` otherwise.
"""
@spec cwd() :: {:ok, binary} | {:error, posix}
def cwd() do
case F.get_cwd do
- { :ok, base } -> { :ok, String.from_char_data!(base) }
- { :error, _ } = error -> error
+ {:ok, base} -> {:ok, String.from_char_data!(base)}
+ {:error, _} = error -> error
end
end
@@ -1032,8 +1032,8 @@ defmodule File do
@spec cwd!() :: binary | no_return
def cwd!() do
case F.get_cwd do
- { :ok, cwd } -> String.from_char_data!(cwd)
- { :error, reason } ->
+ {:ok, cwd} -> String.from_char_data!(cwd)
+ {:error, reason} ->
raise File.Error, reason: reason, action: "get current working directory"
end
end
@@ -1041,7 +1041,7 @@ defmodule File do
@doc """
Sets the current working directory.
- Returns `:ok` if successful, `{ :error, reason }` otherwise.
+ Returns `:ok` if successful, `{:error, reason}` otherwise.
"""
@spec cd(Path.t) :: :ok | {:error, posix}
def cd(path) do
@@ -1056,7 +1056,7 @@ defmodule File do
path = String.from_char_data!(path)
case F.set_cwd(path) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "set current working directory to", path: path
end
end
@@ -1083,14 +1083,14 @@ defmodule File do
@doc """
Returns list of files in the given directory.
- It returns `{ :ok, [files] }` in case of success,
- `{ :error, reason }` otherwise.
+ It returns `{:ok, [files]}` in case of success,
+ `{:error, reason}` otherwise.
"""
@spec ls(Path.t) :: {:ok, [binary]} | {:error, posix}
def ls(path \\ ".") do
case F.list_dir(String.from_char_data!(path)) do
- { :ok, file_list } -> { :ok, Enum.map(file_list, &String.from_char_data!/1) }
- { :error, _ } = error -> error
+ {:ok, file_list} -> {:ok, Enum.map(file_list, &String.from_char_data!/1)}
+ {:error, _} = error -> error
end
end
@@ -1102,8 +1102,8 @@ defmodule File do
def ls!(path \\ ".") do
path = String.from_char_data!(path)
case ls(path) do
- { :ok, value } -> value
- { :error, reason } ->
+ {:ok, value} -> value
+ {:error, reason} ->
raise File.Error, reason: reason, action: "list directory", path: path
end
end
@@ -1183,7 +1183,7 @@ defmodule File do
path = String.from_char_data!(path)
case chmod(path, mode) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "change mode for", path: path
end
end
@@ -1206,7 +1206,7 @@ defmodule File do
path = String.from_char_data!(path)
case chgrp(path, gid) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "change group for", path: path
end
end
@@ -1229,7 +1229,7 @@ defmodule File do
path = String.from_char_data!(path)
case chown(path, uid) do
:ok -> :ok
- { :error, reason } ->
+ {:error, reason} ->
raise File.Error, reason: reason, action: "change owner for", path: path
end
end
@@ -1243,11 +1243,11 @@ defmodule File do
end
defp open_defaults([:utf8|t], add_binary) do
- open_defaults([{ :encoding, :utf8 }|t], add_binary)
+ open_defaults([{:encoding, :utf8}|t], add_binary)
end
defp open_defaults([:read_ahead|t], add_binary) do
- open_defaults([{ :read_ahead, @read_ahead }|t], add_binary)
+ open_defaults([{:read_ahead, @read_ahead}|t], add_binary)
end
defp open_defaults([h|t], add_binary) do
diff --git a/lib/elixir/lib/float.ex b/lib/elixir/lib/float.ex
index 539a335bc..efeaa27bb 100644
--- a/lib/elixir/lib/float.ex
+++ b/lib/elixir/lib/float.ex
@@ -6,7 +6,7 @@ defmodule Float do
@doc """
Parses a binary into a float.
- If successful, returns a tuple of the form `{ float, remainder_of_binary }`.
+ If successful, returns a tuple of the form `{float, remainder_of_binary}`.
Otherwise `:error`.
## Examples
@@ -24,11 +24,11 @@ defmodule Float do
:error
"""
- @spec parse(binary) :: { float, binary } | :error
+ @spec parse(binary) :: {float, binary} | :error
def parse("-" <> binary) do
case parse_unsign(binary) do
:error -> :error
- { number, remainder } -> { -number, remainder }
+ {number, remainder} -> {-number, remainder}
end
end
@@ -40,7 +40,7 @@ defmodule Float do
defp parse_unsign(binary) when is_binary(binary) do
case Integer.parse binary do
:error -> :error
- { integer_part, after_integer } -> parse_unsign after_integer, integer_part
+ {integer_part, after_integer} -> parse_unsign after_integer, integer_part
end
end
@@ -50,7 +50,7 @@ defmodule Float do
end
defp parse_unsign(rest, int) do
- { :erlang.float(int), rest }
+ {:erlang.float(int), rest}
end
# Handle decimal points
@@ -65,14 +65,14 @@ defmodule Float do
# the function clause because the current approach copies a binary
# just on this branch. If we broke it apart in the function clause,
# the copy would happen when calling Integer.parse/1.
- { floatify(int, float, decimal), << ?e, after_e :: binary >> }
- { exponential, after_exponential } ->
- { floatify(int, float, decimal, exponential), after_exponential }
+ {floatify(int, float, decimal), << ?e, after_e :: binary >>}
+ {exponential, after_exponential} ->
+ {floatify(int, float, decimal, exponential), after_exponential}
end
end
defp parse_unsign(bitstring, float, decimal, int) do
- { floatify(int, float, decimal), bitstring }
+ {floatify(int, float, decimal), bitstring}
end
defp floatify(int, float, decimal, exponential \\ 0) do
diff --git a/lib/elixir/lib/gen_event/behaviour.ex b/lib/elixir/lib/gen_event/behaviour.ex
index d01181ca4..80364a1f6 100644
--- a/lib/elixir/lib/gen_event/behaviour.ex
+++ b/lib/elixir/lib/gen_event/behaviour.ex
@@ -15,11 +15,11 @@ defmodule GenEvent.Behaviour do
# Callbacks
def init(_) do
- { :ok, [] }
+ {:ok, []}
end
def handle_event({:notification, x}, notifications) do
- { :ok, [x|notifications] }
+ {:ok, [x|notifications]}
end
def handle_call(:notifications, notifications) do
@@ -27,7 +27,7 @@ defmodule GenEvent.Behaviour do
end
end
- { :ok, pid } = :gen_event.start_link
+ {:ok, pid} = :gen_event.start_link
#=> {:ok,#PID<0.42.0>}
:gen_event.add_handler(pid, MyEventHandler, [])
@@ -63,22 +63,22 @@ defmodule GenEvent.Behaviour do
@doc false
def init(args) do
- { :ok, args }
+ {:ok, args}
end
@doc false
def handle_event(_event, state) do
- { :ok, state }
+ {:ok, state}
end
@doc false
def handle_call(_request, state) do
- { :ok, :ok, state }
+ {:ok, :ok, state}
end
@doc false
def handle_info(_msg, state) do
- { :ok, state }
+ {:ok, state}
end
@doc false
@@ -88,7 +88,7 @@ defmodule GenEvent.Behaviour do
@doc false
def code_change(_old, state, _extra) do
- { :ok, state }
+ {:ok, state}
end
defoverridable [init: 1,
diff --git a/lib/elixir/lib/gen_server/behaviour.ex b/lib/elixir/lib/gen_server/behaviour.ex
index 50f2be2f9..c81f0923a 100644
--- a/lib/elixir/lib/gen_server/behaviour.ex
+++ b/lib/elixir/lib/gen_server/behaviour.ex
@@ -17,7 +17,7 @@ defmodule GenServer.Behaviour do
# Callbacks
def handle_call(:pop, _from, [h|t]) do
- { :reply, h, t }
+ {:reply, h, t}
end
def handle_call(request, from, config) do
@@ -25,8 +25,8 @@ defmodule GenServer.Behaviour do
super(request, from, config)
end
- def handle_cast({ :push, item }, config) do
- { :noreply, [item|config] }
+ def handle_cast({:push, item}, config) do
+ {:noreply, [item|config]}
end
def handle_cast(request, config) do
@@ -34,12 +34,12 @@ defmodule GenServer.Behaviour do
end
end
- { :ok, pid } = :gen_server.start_link(MyServer, [:hello], [])
+ {:ok, pid} = :gen_server.start_link(MyServer, [:hello], [])
:gen_server.call(pid, :pop)
#=> :hello
- :gen_server.cast(pid, { :push, :world })
+ :gen_server.cast(pid, {:push, :world})
#=> :ok
:gen_server.call(pid, :pop)
@@ -51,14 +51,14 @@ defmodule GenServer.Behaviour do
GenServer, there are 8 different values a callback such as
`handle_call` or `handle_cast` can return:
- { :reply, reply, new_state }
- { :reply, reply, new_state, timeout }
- { :reply, reply, new_state, :hibernate }
- { :noreply, new_state }
- { :noreply, new_state, timeout }
- { :noreply, new_state, :hibernate }
- { :stop, reason, new_state }
- { :stop, reason, reply, new_state }
+ {:reply, reply, new_state}
+ {:reply, reply, new_state, timeout}
+ {:reply, reply, new_state, :hibernate}
+ {:noreply, new_state}
+ {:noreply, new_state, timeout}
+ {:noreply, new_state, :hibernate}
+ {:stop, reason, new_state}
+ {:stop, reason, reply, new_state}
There are 6 callbacks required to be implemented in a GenServer. The
`GenServer.Behaviour` module defines all of them automatically, but
@@ -90,22 +90,22 @@ defmodule GenServer.Behaviour do
@doc false
def init(args) do
- { :ok, args }
+ {:ok, args}
end
@doc false
def handle_call(request, _from, state) do
- { :stop, { :bad_call, request }, state }
+ {:stop, {:bad_call, request}, state}
end
@doc false
def handle_info(_msg, state) do
- { :noreply, state }
+ {:noreply, state}
end
@doc false
def handle_cast(msg, state) do
- { :stop, { :bad_cast, msg }, state }
+ {:stop, {:bad_cast, msg}, state}
end
@doc false
@@ -115,7 +115,7 @@ defmodule GenServer.Behaviour do
@doc false
def code_change(_old, state, _extra) do
- { :ok, state }
+ {:ok, state}
end
defoverridable [init: 1, handle_call: 3, handle_info: 2,
diff --git a/lib/elixir/lib/hash_dict.ex b/lib/elixir/lib/hash_dict.ex
index a0ffe00f9..29c0c7091 100644
--- a/lib/elixir/lib/hash_dict.ex
+++ b/lib/elixir/lib/hash_dict.ex
@@ -21,7 +21,7 @@ defmodule HashDict do
# Inline common instructions
@compile :inline_list_funcs
- @compile { :inline, key_hash: 1, key_mask: 1, key_shift: 1 }
+ @compile {:inline, key_hash: 1, key_mask: 1, key_shift: 1}
@doc """
Creates a new empty dict.
@@ -35,7 +35,7 @@ defmodule HashDict do
@spec new(Enum.t) :: Dict.t
def new(enum) do
IO.write :stderr, "HashDict.new/1 is deprecated, please use Enum.into/2 instead\n#{Exception.format_stacktrace}"
- Enum.reduce enum, trie(), fn { k, v }, dict ->
+ Enum.reduce enum, trie(), fn {k, v}, dict ->
put(dict, k, v)
end
end
@@ -45,24 +45,24 @@ defmodule HashDict do
def new(enum, transform) when is_function(transform) do
IO.write :stderr, "HashDict.new/2 is deprecated, please use Enum.into/3 instead\n#{Exception.format_stacktrace}"
Enum.reduce enum, trie(), fn i, dict ->
- { k, v } = transform.(i)
+ {k, v} = transform.(i)
put(dict, k, v)
end
end
def put(trie(root: root, size: size), key, value) do
- { root, counter } = do_put(root, key, value, key_hash(key))
+ {root, counter} = do_put(root, key, value, key_hash(key))
trie(root: root, size: size + counter)
end
def update!(trie(root: root, size: size) = dict, key, fun) when is_function(fun, 1) do
- { root, counter } = do_update(root, key, fn -> raise KeyError, key: key, term: dict end,
+ {root, counter} = do_update(root, key, fn -> raise KeyError, key: key, term: dict end,
fun, key_hash(key))
trie(root: root, size: size + counter)
end
def update(trie(root: root, size: size), key, initial, fun) when is_function(fun, 1) do
- { root, counter } = do_update(root, key, fn -> initial end, fun, key_hash(key))
+ {root, counter} = do_update(root, key, fn -> initial end, fun, key_hash(key))
trie(root: root, size: size + counter)
end
@@ -72,15 +72,15 @@ defmodule HashDict do
def delete(dict, key) do
case dict_delete(dict, key) do
- { dict, _value } -> dict
+ {dict, _value} -> dict
:error -> dict
end
end
def pop(dict, key, default \\ nil) do
case dict_delete(dict, key) do
- { dict, value } -> { value, dict }
- :error -> { default, dict }
+ {dict, value} -> {value, dict}
+ :error -> {default, dict}
end
end
@@ -91,30 +91,30 @@ defmodule HashDict do
@doc false
def reduce(trie(root: root), acc, fun) do
do_reduce(root, acc, fun, @node_size, fn
- {:suspend, acc} -> {:suspended, acc, &{ :done, elem(&1, 1) }}
+ {:suspend, acc} -> {:suspended, acc, &{:done, elem(&1, 1)}}
{:halt, acc} -> {:halted, acc}
{:cont, acc} -> {:done, acc}
end)
end
def split(dict, keys) do
- Enum.reduce keys, { new, dict }, fn key, { inc, exc } = acc ->
+ Enum.reduce keys, {new, dict}, fn key, {inc, exc} = acc ->
case dict_delete(exc, key) do
- { exc, value } -> { put(inc, key, value), exc }
+ {exc, value} -> {put(inc, key, value), exc}
:error -> acc
end
end
end
def merge(trie(size: size1) = dict1, trie(size: size2) = dict2, callback) when size1 < size2 do
- reduce(dict1, { :cont, dict2 }, fn { k, v1 }, acc ->
- { :cont, update(acc, k, v1, &callback.(k, v1, &1)) }
+ reduce(dict1, {:cont, dict2}, fn {k, v1}, acc ->
+ {:cont, update(acc, k, v1, &callback.(k, v1, &1))}
end) |> elem(1)
end
def merge(trie() = dict1, trie() = dict2, callback) do
- reduce(dict2, { :cont, dict1 }, fn { k, v2 }, acc ->
- { :cont, update(acc, k, v2, &callback.(k, &1, v2)) }
+ reduce(dict2, {:cont, dict1}, fn {k, v2}, acc ->
+ {:cont, update(acc, k, v2, &callback.(k, &1, v2))}
end) |> elem(1)
end
@@ -122,7 +122,7 @@ defmodule HashDict do
defp dict_delete(trie(root: root, size: size), key) do
case do_delete(root, key, key_hash(key)) do
- { root, value } -> { trie(root: root, size: size - 1), value }
+ {root, value} -> {trie(root: root, size: size - 1), value}
:error -> :error
end
end
@@ -261,9 +261,9 @@ end
defimpl Enumerable, for: HashDict do
def reduce(dict, acc, fun), do: HashDict.reduce(dict, acc, fun)
- def member?(dict, { k, v }), do: { :ok, match?({ :ok, ^v }, HashDict.fetch(dict, k)) }
- def member?(_dict, _), do: { :ok, false }
- def count(dict), do: { :ok, HashDict.size(dict) }
+ def member?(dict, {k, v}), do: {:ok, match?({:ok, ^v}, HashDict.fetch(dict, k))}
+ def member?(_dict, _), do: {:ok, false}
+ def count(dict), do: {:ok, HashDict.size(dict)}
end
defimpl Access, for: HashDict do
@@ -276,10 +276,10 @@ defimpl Collectable, for: HashDict do
end
def into(original) do
- { original, fn
- dict, { :cont, { k, v } } -> Dict.put(dict, k, v)
+ {original, fn
+ dict, {:cont, {k, v}} -> Dict.put(dict, k, v)
dict, :done -> dict
_, :halt -> :ok
- end }
+ end}
end
end
diff --git a/lib/elixir/lib/hash_set.ex b/lib/elixir/lib/hash_set.ex
index 0577e7400..25951d013 100644
--- a/lib/elixir/lib/hash_set.ex
+++ b/lib/elixir/lib/hash_set.ex
@@ -21,7 +21,7 @@ defmodule HashSet do
# Inline common instructions
@compile :inline_list_funcs
- @compile { :inline, key_hash: 1, key_mask: 1, key_shift: 1 }
+ @compile {:inline, key_hash: 1, key_mask: 1, key_shift: 1}
@doc """
Creates a new empty set.
@@ -79,19 +79,19 @@ defmodule HashSet do
end
def subset?(trie() = set1, trie() = set2) do
- reduce(set1, { :cont, true }, fn member, acc ->
+ reduce(set1, {:cont, true}, fn member, acc ->
case member?(set2, member) do
- true -> { :cont, acc }
- _ -> { :halt, false }
+ true -> {:cont, acc}
+ _ -> {:halt, false}
end
end) |> elem(1)
end
def disjoint?(trie() = set1, trie() = set2) do
- reduce(set2, { :cont, true }, fn member, acc ->
+ reduce(set2, {:cont, true}, fn member, acc ->
case member?(set1, member) do
- false -> { :cont, acc }
- _ -> { :halt, false }
+ false -> {:cont, acc}
+ _ -> {:halt, false}
end
end) |> elem(1)
end
@@ -115,7 +115,7 @@ defmodule HashSet do
@doc false
def reduce(trie(root: root), acc, fun) do
do_reduce(root, acc, fun, @node_size, fn
- {:suspend, acc} -> {:suspended, acc, &{ :done, elem(&1, 1) }}
+ {:suspend, acc} -> {:suspended, acc, &{:done, elem(&1, 1)}}
{:halt, acc} -> {:halted, acc}
{:cont, acc} -> {:done, acc}
end)
@@ -260,8 +260,8 @@ end
defimpl Enumerable, for: HashSet do
def reduce(set, acc, fun), do: HashSet.reduce(set, acc, fun)
- def member?(set, v), do: { :ok, HashSet.member?(set, v) }
- def count(set), do: { :ok, HashSet.size(set) }
+ def member?(set, v), do: {:ok, HashSet.member?(set, v)}
+ def count(set), do: {:ok, HashSet.size(set)}
end
defimpl Collectable, for: HashSet do
@@ -270,10 +270,10 @@ defimpl Collectable, for: HashSet do
end
def into(original) do
- { original, fn
- set, { :cont, x } -> HashSet.put(set, x)
+ {original, fn
+ set, {:cont, x} -> HashSet.put(set, x)
set, :done -> set
_, :halt -> :ok
- end }
+ end}
end
end
diff --git a/lib/elixir/lib/inspect.ex b/lib/elixir/lib/inspect.ex
index 8223e7b97..518a69652 100644
--- a/lib/elixir/lib/inspect.ex
+++ b/lib/elixir/lib/inspect.ex
@@ -165,7 +165,7 @@ defimpl Inspect, for: BitString do
escape(t, char, << binary :: binary, ?\\, char >>)
end
defp escape(<<?#, ?{, t :: binary>>, char, binary) do
- escape(t, char, << binary :: binary, ?\\, ?#, ?{ >>)
+ escape(t, char, << binary :: binary, ?\\, ?#, ?{>>)
end
defp escape(<<?\a, t :: binary>>, char, binary) do
escape(t, char, << binary :: binary, ?\\, ?a >>)
@@ -299,7 +299,7 @@ defimpl Inspect, for: List do
end
end
- def keyword?([{ key, _value } | rest]) when is_atom(key) do
+ def keyword?([{key, _value} | rest]) when is_atom(key) do
case atom_to_list(key) do
'Elixir.' ++ _ -> false
_ -> keyword?(rest)
@@ -352,10 +352,10 @@ defimpl Inspect, for: Tuple do
)
end
- defp zip_fields([{ key, _ }|tk], [value|tv]) do
+ defp zip_fields([{key, _}|tk], [value|tv]) do
case atom_to_binary(key) do
"_" <> _ -> zip_fields(tk, tv)
- key -> [{ key, value }|zip_fields(tk, tv)]
+ key -> [{key, value}|zip_fields(tk, tv)]
end
end
@@ -363,7 +363,7 @@ defimpl Inspect, for: Tuple do
[]
end
- defp keyword({ k, v }, opts) do
+ defp keyword({k, v}, opts) do
concat(k <> ": ", to_doc(v, opts))
end
end
diff --git a/lib/elixir/lib/inspect/algebra.ex b/lib/elixir/lib/inspect/algebra.ex
index c7732910d..e63ef73b8 100644
--- a/lib/elixir/lib/inspect/algebra.ex
+++ b/lib/elixir/lib/inspect/algebra.ex
@@ -122,7 +122,7 @@ defmodule Inspect.Algebra do
end
end
- def to_doc(%{ __struct__: struct } = map, opts) when is_atom(struct) and is_record(opts, Inspect.Opts) do
+ def to_doc(%{__struct__: struct} = map, opts) when is_atom(struct) and is_record(opts, Inspect.Opts) do
if opts.structs do
try do
Inspect.inspect(map, opts)
@@ -415,7 +415,7 @@ defmodule Inspect.Algebra do
@typep mode :: :flat | :break
@doc false
- @spec fits?(integer, [{ integer, mode, t }]) :: boolean
+ @spec fits?(integer, [{integer, mode, t}]) :: boolean
def fits?(w, _) when w < 0, do: false
def fits?(_, []), do: true
def fits?(_, [{_, _, :doc_line} | _]), do: true
@@ -428,7 +428,7 @@ defmodule Inspect.Algebra do
def fits?(_, [{_, :break, doc_break(str: _)} | _]), do: true
@doc false
- @spec format(integer | :infinity, integer, [{ integer, mode, t }]) :: [binary]
+ @spec format(integer | :infinity, integer, [{integer, mode, t}]) :: [binary]
def format(_, _, []), do: []
def format(w, _, [{i, _, :doc_line} | t]), do: [indent(i) | format(w, i, t)]
def format(w, k, [{_, _, :doc_nil} | t]), do: format(w, k, t)
diff --git a/lib/elixir/lib/integer.ex b/lib/elixir/lib/integer.ex
index 8f76e05c5..2f202caae 100644
--- a/lib/elixir/lib/integer.ex
+++ b/lib/elixir/lib/integer.ex
@@ -28,7 +28,7 @@ defmodule Integer do
@doc """
Converts a binary to an integer.
- If successful, returns a tuple of the form `{ integer, remainder_of_binary }`.
+ If successful, returns a tuple of the form `{integer, remainder_of_binary}`.
Otherwise `:error`.
## Examples
@@ -43,11 +43,11 @@ defmodule Integer do
:error
"""
- @spec parse(binary) :: { integer, binary } | :error
+ @spec parse(binary) :: {integer, binary} | :error
def parse(<< ?-, bin :: binary >>) do
case do_parse(bin) do
:error -> :error
- { number, remainder } -> { -number, remainder }
+ {number, remainder} -> {-number, remainder}
end
end
@@ -67,6 +67,6 @@ defmodule Integer do
end
defp do_parse(bitstring, acc) do
- { acc, bitstring }
+ {acc, bitstring}
end
end
diff --git a/lib/elixir/lib/io.ex b/lib/elixir/lib/io.ex
index baa3b5534..94f8006a0 100644
--- a/lib/elixir/lib/io.ex
+++ b/lib/elixir/lib/io.ex
@@ -25,13 +25,13 @@ defmodule IO.Stream do
stream
end
- def into(%{ device: device, raw: raw } = stream) do
- { :ok, into(stream, device, raw) }
+ def into(%{device: device, raw: raw} = stream) do
+ {:ok, into(stream, device, raw)}
end
defp into(stream, device, raw) do
fn
- :ok, { :cont, x } ->
+ :ok, {:cont, x} ->
case raw do
true -> IO.binwrite(device, x)
false -> IO.write(device, x)
@@ -42,7 +42,7 @@ defmodule IO.Stream do
end
defimpl Enumerable do
- def reduce(%{ device: device, raw: raw, line_or_bytes: line_or_bytes }, acc, fun) do
+ def reduce(%{device: device, raw: raw, line_or_bytes: line_or_bytes}, acc, fun) do
next_fun =
case raw do
true -> &IO.each_binstream(&1, line_or_bytes)
@@ -52,11 +52,11 @@ defmodule IO.Stream do
end
def count(_stream) do
- { :error, __MODULE__ }
+ {:error, __MODULE__}
end
def member?(_stream, _term) do
- { :error, __MODULE__ }
+ {:error, __MODULE__}
end
end
end
@@ -80,7 +80,7 @@ defmodule IO do
"""
@type device :: atom | pid
- @type nodata :: { :error, term } | :eof
+ @type nodata :: {:error, term} | :eof
import :erlang, only: [group_leader: 0]
@@ -130,14 +130,14 @@ defmodule IO do
def binread(device, :line) do
case :file.read_line(map_dev(device)) do
- { :ok, data } -> data
+ {:ok, data} -> data
other -> other
end
end
def binread(device, count) when count >= 0 do
case :file.read(map_dev(device), count) do
- { :ok, data } -> data
+ {:ok, data} -> data
other -> other
end
end
@@ -168,7 +168,7 @@ defmodule IO do
Check `write/2` for more information.
"""
- @spec binwrite(device, iodata) :: :ok | { :error, term }
+ @spec binwrite(device, iodata) :: :ok | {:error, term}
def binwrite(device \\ group_leader(), item) when is_iodata(item) do
:file.write map_dev(device), item
end
@@ -211,8 +211,8 @@ defmodule IO do
unless Keyword.get(opts, :width) do
opts = case :io.columns(device) do
- { :ok, width } -> [width: width] ++ opts
- { :error, _ } -> opts
+ {:ok, width} -> [width: width] ++ opts
+ {:error, _} -> opts
end
end
@@ -301,7 +301,7 @@ defmodule IO do
"""
@spec stream(device, :line | pos_integer) :: Enumerable.t
def stream(device, line_or_codepoints) do
- %IO.Stream{ device: map_dev(device), raw: false, line_or_bytes: line_or_codepoints }
+ %IO.Stream{device: map_dev(device), raw: false, line_or_bytes: line_or_codepoints}
end
@doc """
@@ -319,7 +319,7 @@ defmodule IO do
"""
@spec binstream(device, :line | pos_integer) :: Enumerable.t
def binstream(device, line_or_bytes) do
- %IO.Stream{ device: map_dev(device), raw: true, line_or_bytes: line_or_bytes }
+ %IO.Stream{device: map_dev(device), raw: true, line_or_bytes: line_or_bytes}
end
@doc false
@@ -327,10 +327,10 @@ defmodule IO do
case read(device, what) do
:eof ->
nil
- { :error, reason } ->
+ {:error, reason} ->
raise IO.StreamError, reason: reason
data ->
- { data, device }
+ {data, device}
end
end
@@ -339,10 +339,10 @@ defmodule IO do
case binread(device, what) do
:eof ->
nil
- { :error, reason } ->
+ {:error, reason} ->
raise IO.StreamError, reason: reason
data ->
- { data, device }
+ {data, device}
end
end
diff --git a/lib/elixir/lib/io/ansi.ex b/lib/elixir/lib/io/ansi.ex
index f3abf0abb..c46f5b0c8 100644
--- a/lib/elixir/lib/io/ansi.ex
+++ b/lib/elixir/lib/io/ansi.ex
@@ -8,7 +8,7 @@ defmodule IO.ANSI.Sequence do
end
defp escape_sequence(<< unquote(atom_to_binary(name)), rest :: binary >>) do
- { "\e[#{unquote(code)}#{unquote(terminator)}", rest }
+ {"\e[#{unquote(code)}#{unquote(terminator)}", rest}
end
end
end
@@ -92,7 +92,7 @@ defmodule IO.ANSI do
colors = [:black, :red, :green, :yellow, :blue, :magenta, :cyan, :white]
colors = Enum.zip(0..(length(colors)-1), colors)
- for { code, color } <- colors do
+ for {code, color} <- colors do
@doc "Sets foreground color to #{color}"
defsequence color, code + 30
diff --git a/lib/elixir/lib/io/ansi/docs.ex b/lib/elixir/lib/io/ansi/docs.ex
index 55d039ed9..8ea0f6591 100644
--- a/lib/elixir/lib/io/ansi/docs.ex
+++ b/lib/elixir/lib/io/ansi/docs.ex
@@ -79,7 +79,7 @@ defmodule IO.ANSI.Docs do
end
defp process([line | rest], indent, colors) do
- { stripped, count } = strip_spaces(line, 0)
+ {stripped, count} = strip_spaces(line, 0)
case stripped do
<<bullet, ?\s, item :: binary >> when bullet in @bullets ->
process_list(item, rest, count, indent, colors)
@@ -93,7 +93,7 @@ defmodule IO.ANSI.Docs do
end
defp strip_spaces(rest, acc) do
- { rest, acc }
+ {rest, acc}
end
## Headings
@@ -115,7 +115,7 @@ defmodule IO.ANSI.Docs do
defp process_list(line, rest, count, indent, colors) do
IO.write indent <> "• "
- { contents, rest, done } = process_list_next(rest, count, false, [])
+ {contents, rest, done} = process_list_next(rest, count, false, [])
process_text(contents, [line], indent <> " ", true, colors)
if done, do: IO.puts(IO.ANSI.reset)
process(rest, indent, colors)
@@ -129,13 +129,13 @@ defmodule IO.ANSI.Docs do
#
defp process_list_next([" " <> _ = line | rest], count, _done, acc) do
case list_next(line, count) do
- :done -> { Enum.reverse(acc), [line|rest], false }
+ :done -> {Enum.reverse(acc), [line|rest], false}
chopped -> process_list_next(rest, count, false, [chopped|acc])
end
end
defp process_list_next([<<bullet, ?\s, _ :: binary>> | _] = rest, _count, _done, acc) when bullet in @bullets do
- { Enum.reverse(acc), rest, false }
+ {Enum.reverse(acc), rest, false}
end
defp process_list_next(["" | rest], count, _done, acc) do
@@ -143,7 +143,7 @@ defmodule IO.ANSI.Docs do
end
defp process_list_next(rest, _count, done, acc) do
- { Enum.reverse(acc), rest, done }
+ {Enum.reverse(acc), rest, done}
end
defp list_next(<<bullet, ?\s, _ :: binary>>, 0) when bullet in @bullets, do: :done
@@ -166,7 +166,7 @@ defmodule IO.ANSI.Docs do
end
defp process_text([line | rest], para, indent, true, colors) do
- { stripped, count } = strip_spaces(line, 0)
+ {stripped, count} = strip_spaces(line, 0)
case stripped do
<<bullet, ?\s, item :: binary>> when bullet in @bullets ->
write_text(Enum.reverse(para), indent, true, colors)
@@ -227,7 +227,7 @@ defmodule IO.ANSI.Docs do
end
defp write_with_wrap(words, available, indent, first) do
- { words, rest } = take_words(words, available, [])
+ {words, rest} = take_words(words, available, [])
IO.puts (if first, do: "", else: indent) <> Enum.join(words, " ")
write_with_wrap(rest, available, indent, false)
end
@@ -242,16 +242,16 @@ defmodule IO.ANSI.Docs do
# No space but we got no words
acc == [] ->
- { [word], words }
+ {[word], words}
# Otherwise
true ->
- { Enum.reverse(acc), [word|words] }
+ {Enum.reverse(acc), [word|words]}
end
end
defp take_words([], _available, acc) do
- { Enum.reverse(acc), [] }
+ {Enum.reverse(acc), []}
end
defp length_without_escape(<< ?\e, ?[, _, _, ?m, rest :: binary >>, count) do
@@ -264,7 +264,7 @@ defmodule IO.ANSI.Docs do
defp length_without_escape(rest, count) do
case String.next_grapheme(rest) do
- { _, rest } -> length_without_escape(rest, count + 1)
+ {_, rest} -> length_without_escape(rest, count + 1)
nil -> count
end
end
@@ -370,7 +370,7 @@ defmodule IO.ANSI.Docs do
defp column_width() do
case :io.columns do
- { :ok, width } -> min(width, 80)
+ {:ok, width} -> min(width, 80)
_ -> 80
end
end
diff --git a/lib/elixir/lib/kernel.ex b/lib/elixir/lib/kernel.ex
index 0ea8d3405..149e917ff 100644
--- a/lib/elixir/lib/kernel.ex
+++ b/lib/elixir/lib/kernel.ex
@@ -960,7 +960,7 @@ defmodule Kernel do
:hello
"""
- @spec send(dest :: pid | port | atom | { atom, node }, msg) :: msg when msg: any
+ @spec send(dest :: pid | port | atom | {atom, node}, msg) :: msg when msg: any
def send(dest, msg) do
:erlang.send(dest, msg)
end
@@ -998,10 +998,10 @@ defmodule Kernel do
## Examples
current = Kernel.self
- child = spawn(fn -> send current, { Kernel.self, 1 + 2 } end)
+ child = spawn(fn -> send current, {Kernel.self, 1 + 2} end)
receive do
- { ^child, 3 } -> IO.puts "Received 3 back"
+ {^child, 3} -> IO.puts "Received 3 back"
end
"""
@@ -1040,10 +1040,10 @@ defmodule Kernel do
## Examples
current = Kernel.self
- child = spawn_link(fn -> send current, { Kernel.self, 1 + 2 } end)
+ child = spawn_link(fn -> send current, {Kernel.self, 1 + 2} end)
receive do
- { ^child, 3 } -> IO.puts "Received 3 back"
+ {^child, 3} -> IO.puts "Received 3 back"
end
"""
@@ -1479,7 +1479,7 @@ defmodule Kernel do
## Example
- iex> tuple = { :foo, :bar, 3 }
+ iex> tuple = {:foo, :bar, 3}
iex> elem(tuple, 1)
:bar
@@ -1496,9 +1496,9 @@ defmodule Kernel do
## Example
- iex> tuple = { :foo, :bar, 3 }
+ iex> tuple = {:foo, :bar, 3}
iex> set_elem(tuple, 0, :baz)
- { :baz, :bar, 3 }
+ {:baz, :bar, 3}
"""
@spec set_elem(tuple, non_neg_integer, term) :: tuple
@@ -1556,7 +1556,7 @@ defmodule Kernel do
"""
defmacro !(arg)
- defmacro !({ :!, _, [arg] }) do
+ defmacro !({:!, _, [arg]}) do
quote do
case unquote(arg) do
unquote(cond_var) when unquote(cond_var) in [false, nil] -> false
@@ -1591,13 +1591,13 @@ defmodule Kernel do
"""
defmacro left <> right do
- concats = extract_concatenations({ :<>, [], [left, right] })
+ concats = extract_concatenations({:<>, [], [left, right]})
quote do: << unquote_splicing(concats) >>
end
# Extracts concatenations in order to optimize many
# concatenations into one single clause.
- defp extract_concatenations({ :<>, _, [left, right] }) do
+ defp extract_concatenations({:<>, _, [left, right]}) do
[wrap_concatenation(left)|extract_concatenations(right)]
end
@@ -1610,7 +1610,7 @@ defmodule Kernel do
end
defp wrap_concatenation(other) do
- { :::, [], [other, { :binary, [], nil }] }
+ {:::, [], [other, {:binary, [], nil}]}
end
@doc """
@@ -1648,7 +1648,7 @@ defmodule Kernel do
quote do
:erlang.error RuntimeError.exception(message: unquote(msg))
end
- { :<<>>, _, _ } = msg ->
+ {:<<>>, _, _} = msg ->
quote do
:erlang.error RuntimeError.exception(message: unquote(msg))
end
@@ -1930,8 +1930,8 @@ defmodule Kernel do
struct(apply(struct, :__struct__, []), kv)
end
- def struct(%{ __struct__: _ } = struct, kv) do
- Enum.reduce(kv, struct, fn { k, v }, acc ->
+ def struct(%{__struct__: _} = struct, kv) do
+ Enum.reduce(kv, struct, fn {k, v}, acc ->
case :maps.is_key(k, acc) and k != :__struct__ do
true -> :maps.put(k, v, acc)
false -> acc
@@ -2009,7 +2009,7 @@ defmodule Kernel do
Guard clauses can also be given to the match:
list = [{:a, 1}, {:b, 2}, {:a, 3}]
- Enum.filter list, &match?({:a, x } when x < 2, &1)
+ Enum.filter list, &match?({:a, x} when x < 2, &1)
However, variables assigned in the match will not be available
outside of the function call:
@@ -2024,7 +2024,7 @@ defmodule Kernel do
defmacro match?(pattern, expr)
# Special case underscore since it always matches
- defmacro match?({ :_, _, atom }, _right) when is_atom(atom) do
+ defmacro match?({:_, _, atom}, _right) when is_atom(atom) do
true
end
@@ -2083,7 +2083,7 @@ defmodule Kernel do
defmacro @(expr)
# Typespecs attributes are special cased by the compiler so far
- defmacro @({ name, _, args }) do
+ defmacro @({name, _, args}) do
# Check for Macro as it is compiled later than Module
case bootstraped?(Module) do
false -> nil
@@ -2227,14 +2227,14 @@ defmodule Kernel do
end
defp do_binding(context, vars, in_match) do
- for { v, c } <- vars, c == context, v != :_@CALLER do
- { v, wrap_binding(in_match, { v, [], c }) }
+ for {v, c} <- vars, c == context, v != :_@CALLER do
+ {v, wrap_binding(in_match, {v, [], c})}
end
end
defp do_binding(list, context, vars, in_match) do
- for { v, c } <- vars, c == context, :lists.member(v, list) do
- { v, wrap_binding(in_match, { v, [], c }) }
+ for {v, c} <- vars, c == context, :lists.member(v, list) do
+ {v, wrap_binding(in_match, {v, [], c})}
end
end
@@ -2312,11 +2312,11 @@ defmodule Kernel do
"""
defmacro cond([do: pairs]) do
- [{ :->, meta, [[condition], clause] }|t] = :lists.reverse pairs
+ [{:->, meta, [[condition], clause]}|t] = :lists.reverse pairs
new_acc =
case condition do
- { :_, _, atom } when is_atom(atom) ->
+ {:_, _, atom} when is_atom(atom) ->
raise ArgumentError, message: <<"unbound variable _ inside cond. ",
"If you want the last clause to match, you probably meant to use true ->">>
x when is_atom(x) and x != false and x != nil ->
@@ -2343,7 +2343,7 @@ defmodule Kernel do
# end
# end
#
- defp build_cond_clauses([{ :->, new, [[condition], clause] }|t], acc, old) do
+ defp build_cond_clauses([{:->, new, [[condition], clause]}|t], acc, old) do
clauses = [falsy_clause(old, acc), truthy_clause(new, clause)]
acc = quote do: (case unquote(condition), do: unquote(clauses))
build_cond_clauses(t, acc, new)
@@ -2352,23 +2352,23 @@ defmodule Kernel do
defp build_cond_clauses([], acc, _), do: acc
defp falsy_clause(meta, acc) do
- { :->, meta, [[quote(do: unquote(cond_var) when unquote(cond_var) in [false, nil])], acc] }
+ {:->, meta, [[quote(do: unquote(cond_var) when unquote(cond_var) in [false, nil])], acc]}
end
defp truthy_clause(meta, clause) do
- { :->, meta, [[quote(do: _)], clause] }
+ {:->, meta, [[quote(do: _)], clause]}
end
# Setting cond: true in metadata turns on a small optimization
# in Elixir compiler. In the long run, we want to bring this
# optimization to Elixir land, but not right now.
defp cond_var do
- { :x, [cond: true], Kernel }
+ {:x, [cond: true], Kernel}
end
defp get_line(meta) do
case :lists.keyfind(:line, 1, meta) do
- { :line, line } -> line
+ {:line, line} -> line
false -> 0
end
end
@@ -2429,7 +2429,7 @@ defmodule Kernel do
"""
defmacro destructure(left, right) when is_list(left) do
Enum.reduce left, right, fn item, acc ->
- { :case, meta, args } =
+ {:case, meta, args} =
quote do
case unquote(acc) do
[unquote(item)|t] ->
@@ -2438,7 +2438,7 @@ defmodule Kernel do
unquote(item) = nil
end
end
- { :case, [{:export_head,true}|meta], args }
+ {:case, [{:export_head,true}|meta], args}
end
end
@@ -2462,7 +2462,7 @@ defmodule Kernel do
"""
defmacro first .. last do
- { :{}, [], [Elixir.Range, first, last] }
+ {:{}, [], [Elixir.Range, first, last]}
end
@doc """
@@ -2583,7 +2583,7 @@ defmodule Kernel do
"""
defmacro left |> right do
- :lists.foldl fn { x, pos }, acc -> Macro.pipe(acc, x, pos) end, left, Macro.unpipe(right)
+ :lists.foldl fn {x, pos}, acc -> Macro.pipe(acc, x, pos) end, left, Macro.unpipe(right)
end
@doc """
@@ -2651,7 +2651,7 @@ defmodule Kernel do
# it is loaded so we allow the ParallelCompiler to solve
# conflicts.
case :code.ensure_loaded(atom) do
- { :error, _ } ->
+ {:error, _} ->
:elixir_aliases.ensure_loaded(caller.line, atom, :elixir_env.ex_to_env(caller))
_ ->
raise ArgumentError, message: "cannot access module #{inspect atom} because it is not a record"
@@ -2726,7 +2726,7 @@ defmodule Kernel do
unquote(comp(left, x)) or unquote(acc)
end
end, comp(left, h), t)
- { :{}, _, [Elixir.Range, first, last] } ->
+ {:{}, _, [Elixir.Range, first, last]} ->
in_range(left, Macro.expand(first, __CALLER__), Macro.expand(last, __CALLER__))
first .. last ->
# This range came from a module attribute, so it is a
@@ -2790,7 +2790,7 @@ defmodule Kernel do
do_var!(var, [], context, __CALLER__)
end
- defmacro var!({ name, meta, atom }, context) when is_atom(name) and is_atom(atom) do
+ defmacro var!({name, meta, atom}, context) when is_atom(name) and is_atom(atom) do
do_var!(name, meta, context, __CALLER__)
end
@@ -2801,11 +2801,11 @@ defmodule Kernel do
defp do_var!(name, meta, context, env) do
# Remove counter and force them to be vars
meta = :lists.keydelete(:counter, 1, meta)
- meta = :lists.keystore(:var, 1, meta, { :var, true })
+ meta = :lists.keystore(:var, 1, meta, {:var, true})
case Macro.expand(context, env) do
x when is_atom(x) ->
- { name, meta, x }
+ {name, meta, x}
x ->
raise ArgumentError, message: "expected var! context to expand to an atom, got: #{Macro.to_string(x)}"
end
@@ -2824,10 +2824,10 @@ defmodule Kernel do
alias
end
- defmacro alias!({ :__aliases__, meta, args }) do
+ defmacro alias!({:__aliases__, meta, args}) do
# Simply remove the alias metadata from the node
# so it does not affect expansion.
- { :__aliases__, :lists.keydelete(:alias, 1, meta), args }
+ {:__aliases__, :lists.keydelete(:alias, 1, meta), args}
end
## Definitions implemented in Elixir
@@ -2889,22 +2889,22 @@ defmodule Kernel do
false -> alias
end
- { expanded, with_alias } =
+ {expanded, with_alias} =
case boot? and is_atom(expanded) do
true ->
# Expand the module considering the current environment/nesting
full = expand_module(alias, expanded, env)
# Generate the alias for this module definition
- { new, old } = module_nesting(env_module(env), full)
+ {new, old} = module_nesting(env_module(env), full)
meta = [defined: full, context: true] ++ alias_meta(alias)
- { full, { :alias, meta, [old, [as: new, warn: false]] } }
+ {full, {:alias, meta, [old, [as: new, warn: false]]}}
false ->
- { expanded, nil }
+ {expanded, nil}
end
- { escaped, _ } = :elixir_quote.escape(block, false)
+ {escaped, _} = :elixir_quote.escape(block, false)
module_vars = module_vars(env_vars(env), 0)
quote do
@@ -2914,7 +2914,7 @@ defmodule Kernel do
end
end
- defp alias_meta({ :__aliases__, meta, _ }), do: meta
+ defp alias_meta({:__aliases__, meta, _}), do: meta
defp alias_meta(_), do: []
# defmodule :foo
@@ -2922,11 +2922,11 @@ defmodule Kernel do
do: raw
# defmodule Hello
- defp expand_module({ :__aliases__, _, [h] }, _module, env),
+ defp expand_module({:__aliases__, _, [h]}, _module, env),
do: :elixir_aliases.concat([env.module, h])
# defmodule Hello.World
- defp expand_module({ :__aliases__, _, _ } = alias, module, env) do
+ defp expand_module({:__aliases__, _, _} = alias, module, env) do
case :elixir_aliases.expand(alias, env.aliases, env.macro_aliases, env.lexical_tracker) do
atom when is_atom(atom) ->
module
@@ -2940,15 +2940,15 @@ defmodule Kernel do
do: :elixir_aliases.concat([env.module, module])
# quote vars to be injected into the module definition
- defp module_vars([{ key, kind }|vars], counter) do
+ defp module_vars([{key, kind}|vars], counter) do
var =
case is_atom(kind) do
- true -> { key, [], kind }
- false -> { key, [counter: kind], nil }
+ true -> {key, [], kind}
+ false -> {key, [counter: kind], nil}
end
args = [key, kind, binary_to_atom(<<"_@", integer_to_binary(counter)::binary>>), var]
- [{ :{}, [], args }|module_vars(vars, counter+1)]
+ [{:{}, [], args}|module_vars(vars, counter+1)]
end
defp module_vars([], _counter) do
@@ -2963,19 +2963,19 @@ defmodule Kernel do
# Examples:
#
# module_nesting('Elixir.Foo.Bar', 'Elixir.Foo.Bar.Baz.Bat')
- # { 'Elixir.Baz', 'Elixir.Foo.Bar.Baz' }
+ # {'Elixir.Baz', 'Elixir.Foo.Bar.Baz'}
#
# In case there is no nesting/no module:
#
# module_nesting(nil, 'Elixir.Foo.Bar.Baz.Bat')
- # { false, 'Elixir.Foo.Bar.Baz.Bat' }
+ # {false, 'Elixir.Foo.Bar.Baz.Bat'}
#
defp module_nesting(nil, full),
- do: { false, full }
+ do: {false, full}
defp module_nesting(prefix, full) do
case split_module(prefix) do
- [] -> { false, full }
+ [] -> {false, full}
prefix -> module_nesting(prefix, split_module(full), [], full)
end
end
@@ -2983,10 +2983,10 @@ defmodule Kernel do
defp module_nesting([x|t1], [x|t2], acc, full),
do: module_nesting(t1, t2, [x|acc], full)
defp module_nesting([], [h|_], acc, _full),
- do: { binary_to_atom(<<"Elixir.", h::binary>>),
- :elixir_aliases.concat(:lists.reverse([h|acc])) }
+ do: {binary_to_atom(<<"Elixir.", h::binary>>),
+ :elixir_aliases.concat(:lists.reverse([h|acc]))}
defp module_nesting(_, _, _acc, full),
- do: { false, full }
+ do: {false, full}
defp split_module(atom) do
case :binary.split(atom_to_binary(atom), ".", [:global]) do
@@ -3083,8 +3083,8 @@ defmodule Kernel do
assert_no_function_scope(env, kind, 2)
line = env_line(env)
- { call, uc } = :elixir_quote.escape(call, true)
- { expr, ue } = :elixir_quote.escape(expr, true)
+ {call, uc} = :elixir_quote.escape(call, true)
+ {expr, ue} = :elixir_quote.escape(expr, true)
# Do not check clauses if any expression was unquoted
check_clauses = not(ue or uc)
@@ -3140,7 +3140,7 @@ defmodule Kernel do
follows:
inspect User.new, records: false
- #=> { User, nil, 0 }
+ #=> {User, nil, 0}
In addition to defining readers and writers for each attribute, Elixir also
defines an `update_#{attribute}` function to update the value. Such
@@ -3201,14 +3201,14 @@ defmodule Kernel do
arities will be defined to manipulate the underlying record:
# To create records
- user() #=> { :user, "José", 25 }
- user(age: 26) #=> { :user, "José", 26 }
+ user() #=> {:user, "José", 25}
+ user(age: 26) #=> {:user, "José", 26}
# To get a field from the record
user(record, :name) #=> "José"
# To update the record
- user(record, age: 26) #=> { :user, "José", 26 }
+ user(record, age: 26) #=> {:user, "José", 26}
By default, Elixir uses the record name as the first element of the tuple.
In some cases though, this might be undesirable and one can explicitly
@@ -3221,7 +3221,7 @@ defmodule Kernel do
This way, the record created will have `MyServer` as the first element,
not `:state`:
- state() #=> { MyServer, nil }
+ state() #=> {MyServer, nil}
"""
defmacro defrecordp(name, tag \\ nil, fields) do
@@ -3286,15 +3286,15 @@ defmodule Kernel do
quote bind_quoted: [kv: kv] do
# Expand possible macros that return KVs.
kv = Macro.expand(kv, __ENV__)
- { fields, types } = Record.Backend.split_fields_and_types(:defstruct, kv)
+ {fields, types} = Record.Backend.split_fields_and_types(:defstruct, kv)
- if :code.ensure_loaded(Kernel.Typespec) == { :module, Kernel.Typespec } and
+ if :code.ensure_loaded(Kernel.Typespec) == {:module, Kernel.Typespec} and
not Kernel.Typespec.defines_type?(__MODULE__, :t, 0) do
- @type t :: %{ unquote_splicing(types), __struct__: __MODULE__ }
+ @type t :: %{unquote_splicing(types), __struct__: __MODULE__}
end
def __struct__() do
- %{ unquote_splicing(fields), __struct__: __MODULE__ }
+ %{unquote_splicing(fields), __struct__: __MODULE__}
end
end
end
@@ -3342,10 +3342,10 @@ defmodule Kernel do
exception messages.
"""
defmacro defexception(name, fields, do_block \\ []) do
- { fields, do_block } =
+ {fields, do_block} =
case is_list(fields) and Keyword.get(fields, :do, false) do
- false -> { fields, do_block }
- other -> { Keyword.delete(fields, :do), [do: other] }
+ false -> {fields, do_block}
+ other -> {Keyword.delete(fields, :do), [do: other]}
end
do_block = Keyword.put(do_block, :do, quote do
@@ -3366,7 +3366,7 @@ defmodule Kernel do
record = Record.Deprecated.defrecord(name, fields, do_block)
quote do
- { :module, name, _, _ } = unquote(record)
+ {:module, name, _, _} = unquote(record)
unless :erlang.function_exported(name, :message, 1) do
Kernel.raise "expected exception #{inspect name} to implement message/1"
@@ -3668,9 +3668,9 @@ defmodule Kernel do
append_first = Keyword.get(opts, :append_first, false)
for fun <- List.wrap(funs) do
- { name, args } =
+ {name, args} =
case Macro.decompose_call(fun) do
- { _, _ } = pair -> pair
+ {_, _} = pair -> pair
_ -> raise ArgumentError, message: "invalid syntax in defdelegate #{Macro.to_string(fun)}"
end
@@ -3721,8 +3721,8 @@ defmodule Kernel do
"foo"
"""
- defmacro sigil_s({ :<<>>, line, pieces }, []) do
- { :<<>>, line, Macro.unescape_tokens(pieces) }
+ defmacro sigil_s({:<<>>, line, pieces}, []) do
+ {:<<>>, line, Macro.unescape_tokens(pieces)}
end
@doc """
@@ -3738,7 +3738,7 @@ defmodule Kernel do
'f\\\#{o}o'
"""
- defmacro sigil_C({ :<<>>, _line, [string] }, []) when is_binary(string) do
+ defmacro sigil_C({:<<>>, _line, [string]}, []) when is_binary(string) do
List.from_char_data!(string)
end
@@ -3758,12 +3758,12 @@ defmodule Kernel do
# We can skip the runtime conversion if we are
# creating a binary made solely of series of chars.
- defmacro sigil_c({ :<<>>, _line, [string] }, []) when is_binary(string) do
+ defmacro sigil_c({:<<>>, _line, [string]}, []) when is_binary(string) do
List.from_char_data!(Macro.unescape_string(string))
end
- defmacro sigil_c({ :<<>>, line, pieces }, []) do
- binary = { :<<>>, line, Macro.unescape_tokens(pieces) }
+ defmacro sigil_c({:<<>>, line, pieces}, []) do
+ binary = {:<<>>, line, Macro.unescape_tokens(pieces)}
quote do: List.from_char_data!(unquote(binary))
end
@@ -3776,14 +3776,14 @@ defmodule Kernel do
true
"""
- defmacro sigil_r({ :<<>>, _line, [string] }, options) when is_binary(string) do
+ defmacro sigil_r({:<<>>, _line, [string]}, options) when is_binary(string) do
binary = Macro.unescape_string(string, fn(x) -> Regex.unescape_map(x) end)
regex = Regex.compile!(binary, :binary.list_to_bin(options))
Macro.escape(regex)
end
- defmacro sigil_r({ :<<>>, line, pieces }, options) do
- binary = { :<<>>, line, Macro.unescape_tokens(pieces, fn(x) -> Regex.unescape_map(x) end) }
+ defmacro sigil_r({:<<>>, line, pieces}, options) do
+ binary = {:<<>>, line, Macro.unescape_tokens(pieces, fn(x) -> Regex.unescape_map(x) end)}
quote do: Regex.compile!(unquote(binary), unquote(:binary.list_to_bin(options)))
end
@@ -3797,7 +3797,7 @@ defmodule Kernel do
true
"""
- defmacro sigil_R({ :<<>>, _line, [string] }, options) when is_binary(string) do
+ defmacro sigil_R({:<<>>, _line, [string]}, options) when is_binary(string) do
regex = Regex.compile!(string, :binary.list_to_bin(options))
Macro.escape(regex)
end
@@ -3824,12 +3824,12 @@ defmodule Kernel do
"""
- defmacro sigil_w({ :<<>>, _line, [string] }, modifiers) when is_binary(string) do
+ defmacro sigil_w({:<<>>, _line, [string]}, modifiers) when is_binary(string) do
split_words(Macro.unescape_string(string), modifiers)
end
- defmacro sigil_w({ :<<>>, line, pieces }, modifiers) do
- binary = { :<<>>, line, Macro.unescape_tokens(pieces) }
+ defmacro sigil_w({:<<>>, line, pieces}, modifiers) do
+ binary = {:<<>>, line, Macro.unescape_tokens(pieces)}
split_words(binary, modifiers)
end
@@ -3849,7 +3849,7 @@ defmodule Kernel do
["foo", "\\\#{bar}", "baz"]
"""
- defmacro sigil_W({ :<<>>, _line, [string] }, modifiers) when is_binary(string) do
+ defmacro sigil_W({:<<>>, _line, [string]}, modifiers) when is_binary(string) do
split_words(string, modifiers)
end
@@ -3884,11 +3884,11 @@ defmodule Kernel do
# We need this check only for bootstrap purposes.
# Once Kernel is loaded and we recompile, it is a no-op.
case :code.ensure_loaded(Kernel) do
- { :module, _ } ->
+ {:module, _} ->
defp bootstraped?(_), do: true
defp internal?, do: false
- { :error, _ } ->
- defp bootstraped?(module), do: :code.ensure_loaded(module) == { :module, module }
+ {:error, _} ->
+ defp bootstraped?(module), do: :code.ensure_loaded(module) == {:module, module}
defp internal?, do: :elixir_compiler.get_opt(:internal)
end
@@ -3919,8 +3919,8 @@ defmodule Kernel do
end
end
- defp expand_compact([{ :compact, false }|t]), do: expand_compact(t)
- defp expand_compact([{ :compact, true }|t]), do: [:compact|expand_compact(t)]
+ defp expand_compact([{:compact, false}|t]), do: expand_compact(t)
+ defp expand_compact([{:compact, true}|t]), do: [:compact|expand_compact(t)]
defp expand_compact([h|t]), do: [h|expand_compact(t)]
defp expand_compact([]), do: []
end
diff --git a/lib/elixir/lib/kernel/cli.ex b/lib/elixir/lib/kernel/cli.ex
index 029bbf359..a2be9f74c 100644
--- a/lib/elixir/lib/kernel/cli.ex
+++ b/lib/elixir/lib/kernel/cli.ex
@@ -11,12 +11,12 @@ defmodule Kernel.CLI do
def main(argv) do
argv = for arg <- argv, do: String.from_char_data!(arg)
- { config, argv } = process_argv(argv, Kernel.CLI.Config.new)
+ {config, argv} = process_argv(argv, Kernel.CLI.Config.new)
System.argv(argv)
run fn ->
command_results = Enum.map(Enum.reverse(config.commands), &process_command(&1, config))
- command_errors = for { :error, msg } <- command_results, do: msg
+ command_errors = for {:error, msg} <- command_results, do: msg
errors = Enum.reverse(config.errors) ++ command_errors
if errors != [] do
@@ -76,10 +76,10 @@ defmodule Kernel.CLI do
defp shared_option?(list, config, callback) do
case process_shared(list, config) do
- { [h|hs], _ } when h == hd(list) ->
+ {[h|hs], _} when h == hd(list) ->
new_config = config.update_errors &["#{h} : Unknown option" | &1]
callback.(hs, new_config)
- { new_list, new_config } ->
+ {new_list, new_config} ->
callback.(new_list, new_config)
end
end
@@ -100,11 +100,11 @@ defmodule Kernel.CLI do
@elixir_internals [:elixir_compiler, :elixir_module]
- defp prune_stacktrace([{ mod, _, _, _ }|t]) when mod in @elixir_internals do
+ defp prune_stacktrace([{mod, _, _, _}|t]) when mod in @elixir_internals do
prune_stacktrace(t)
end
- defp prune_stacktrace([{ __MODULE__, :wrapper, 1, _ }|_]) do
+ defp prune_stacktrace([{__MODULE__, :wrapper, 1, _}|_]) do
[]
end
@@ -162,13 +162,13 @@ defmodule Kernel.CLI do
end
defp process_shared(list, config) do
- { list, config }
+ {list, config}
end
# Process init options
defp process_argv(["--"|t], config) do
- { config, t }
+ {config, t}
end
defp process_argv(["+elixirc"|t], config) do
@@ -180,7 +180,7 @@ defmodule Kernel.CLI do
end
defp process_argv(["-S", h|t], config) do
- { config.update_commands(&[{:script, h}|&1]), t }
+ {config.update_commands(&[{:script, h}|&1]), t}
end
defp process_argv([h|t] = list, config) do
@@ -188,18 +188,18 @@ defmodule Kernel.CLI do
"-" <> _ ->
shared_option? list, config, &process_argv(&1, &2)
_ ->
- { config.update_commands(&[{:file, h}|&1]), t }
+ {config.update_commands(&[{:file, h}|&1]), t}
end
end
defp process_argv([], config) do
- { config, [] }
+ {config, []}
end
# Process compiler options
defp process_compiler(["--"|t], config) do
- { config, t }
+ {config, t}
end
defp process_compiler(["-o", h|t], config) do
@@ -237,13 +237,13 @@ defmodule Kernel.CLI do
end
defp process_compiler([], config) do
- { config.update_commands(&[{:compile, config.compile}|&1]), [] }
+ {config.update_commands(&[{:compile, config.compile}|&1]), []}
end
# Process iex options
defp process_iex(["--"|t], config) do
- { config, t }
+ {config, t}
end
# This clause is here so that Kernel.CLI does not error out with "unknown
@@ -257,7 +257,7 @@ defmodule Kernel.CLI do
end
defp process_iex(["-S", h|t], config) do
- { config.update_commands(&[{:script, h}|&1]), t }
+ {config.update_commands(&[{:script, h}|&1]), t}
end
defp process_iex([h|t] = list, config) do
@@ -265,12 +265,12 @@ defmodule Kernel.CLI do
"-" <> _ ->
shared_option? list, config, &process_iex(&1, &2)
_ ->
- { config.update_commands(&[{:file, h}|&1]), t }
+ {config.update_commands(&[{:file, h}|&1]), t}
end
end
defp process_iex([], config) do
- { config, [] }
+ {config, []}
end
# Process commands
@@ -279,7 +279,7 @@ defmodule Kernel.CLI do
if Node.alive? do
wrapper fn -> Node.set_cookie(binary_to_atom(h)) end
else
- { :error, "--cookie : Cannot set cookie if the node is not alive (set --name or --sname)" }
+ {:error, "--cookie : Cannot set cookie if the node is not alive (set --name or --sname)"}
end
end
@@ -289,9 +289,9 @@ defmodule Kernel.CLI do
defp process_command({:app, app}, _config) when is_binary(app) do
case :application.ensure_all_started(binary_to_atom(app)) do
- { :error, reason } ->
- { :error, "--app : Could not start application #{app}: #{inspect reason}" }
- { :ok, _ } ->
+ {:error, reason} ->
+ {:error, "--app : Could not start application #{app}: #{inspect reason}"}
+ {:ok, _} ->
:ok
end
end
@@ -300,7 +300,7 @@ defmodule Kernel.CLI do
if exec = find_elixir_executable(file) do
wrapper fn -> Code.require_file(exec) end
else
- { :error, "-S : Could not find executable #{file}" }
+ {:error, "-S : Could not find executable #{file}"}
end
end
@@ -308,7 +308,7 @@ defmodule Kernel.CLI do
if :filelib.is_regular(file) do
wrapper fn -> Code.require_file(file) end
else
- { :error, "No file named #{file}" }
+ {:error, "No file named #{file}"}
end
end
@@ -320,7 +320,7 @@ defmodule Kernel.CLI do
if files != [] do
wrapper fn -> Enum.map files, &Code.require_file(&1) end
else
- { :error, "-r : No files matched pattern #{pattern}" }
+ {:error, "-r : No files matched pattern #{pattern}"}
end
end
@@ -332,7 +332,7 @@ defmodule Kernel.CLI do
if files != [] do
wrapper fn -> Kernel.ParallelRequire.files(files) end
else
- { :error, "-pr : No files matched pattern #{pattern}" }
+ {:error, "-pr : No files matched pattern #{pattern}"}
end
end
@@ -340,24 +340,24 @@ defmodule Kernel.CLI do
:filelib.ensure_dir(:filename.join(config.output, "."))
case match_regular_files(patterns) do
- { :ok, [] } ->
- { :error, "No files matched provided patterns" }
- { :ok, files } ->
+ {:ok, []} ->
+ {:error, "No files matched provided patterns"}
+ {:ok, files} ->
wrapper fn ->
Code.compiler_options(config.compiler_options)
Kernel.ParallelCompiler.files_to_path(files, config.output,
each_file: fn file -> if config.verbose_compile do IO.puts "Compiled #{file}" end end)
end
- { :missing, missing } ->
- { :error, "No files matched pattern(s) #{Enum.join(missing, ",")}" }
+ {:missing, missing} ->
+ {:error, "No files matched pattern(s) #{Enum.join(missing, ",")}"}
end
end
defp match_regular_files(patterns) do
matched_files = Enum.map patterns, fn(pattern) ->
case Path.wildcard(pattern) do
- [] -> { :missing, pattern }
- files -> { :ok, files }
+ [] -> {:missing, pattern}
+ files -> {:ok, files}
end
end
@@ -372,9 +372,9 @@ defmodule Kernel.CLI do
if missing_patterns == [] do
files = Enum.uniq(Enum.concat(files))
files = Enum.filter files, &:filelib.is_regular(&1)
- { :ok, files }
+ {:ok, files}
else
- { :missing, Enum.uniq(missing_patterns) }
+ {:missing, Enum.uniq(missing_patterns)}
end
end
@@ -389,7 +389,7 @@ defmodule Kernel.CLI do
# a .bat file that must be in the same directory as
# the actual Elixir executable.
case :os.type() do
- { :win32, _ } ->
+ {:win32, _} ->
exec = Path.rootname(exec)
if File.regular?(exec), do: exec
_ ->
diff --git a/lib/elixir/lib/kernel/error_handler.ex b/lib/elixir/lib/kernel/error_handler.ex
index a42fe10a0..dfc1dbb58 100644
--- a/lib/elixir/lib/kernel/error_handler.ex
+++ b/lib/elixir/lib/kernel/error_handler.ex
@@ -25,15 +25,15 @@ defmodule Kernel.ErrorHandler do
defp ensure_loaded(module) do
case Code.ensure_loaded(module) do
- { :module, _ } -> []
- { :error, _ } ->
+ {:module, _} -> []
+ {:error, _} ->
parent = :erlang.get(:elixir_compiler_pid)
ref = :erlang.make_ref
- send parent, { :waiting, module, self(), ref, module }
+ send parent, {:waiting, module, self(), ref, module}
:erlang.garbage_collect(self)
receive do
- { ^ref, :ready } -> :ok
- { ^ref, :release } -> release()
+ {^ref, :ready} -> :ok
+ {^ref, :release} -> release()
end
end
end
diff --git a/lib/elixir/lib/kernel/lexical_tracker.ex b/lib/elixir/lib/kernel/lexical_tracker.ex
index d17444df9..4c960fdbb 100644
--- a/lib/elixir/lib/kernel/lexical_tracker.ex
+++ b/lib/elixir/lib/kernel/lexical_tracker.ex
@@ -21,7 +21,7 @@ defmodule Kernel.LexicalTracker do
# scope may be long gone, so it has no associated PID.
if pid = to_pid(arg) do
ets = :gen_server.call(pid, :ets, @timeout)
- :ets.match(ets, { :"$1", :_, :_ }) |> List.flatten
+ :ets.match(ets, {:"$1", :_, :_}) |> List.flatten
else
[]
end
@@ -30,7 +30,7 @@ defmodule Kernel.LexicalTracker do
defp to_pid(pid) when is_pid(pid), do: pid
defp to_pid(mod) when is_atom(mod) do
table = :elixir_module.data_table(mod)
- [{ _, val }] = :ets.lookup(table, :__lexical_tracker)
+ [{_, val}] = :ets.lookup(table, :__lexical_tracker)
val
end
@@ -39,7 +39,7 @@ defmodule Kernel.LexicalTracker do
# Starts the tracker and returns its pid.
@doc false
def start_link do
- { :ok, pid } = :gen_server.start_link(__MODULE__, [], [])
+ {:ok, pid} = :gen_server.start_link(__MODULE__, [], [])
pid
end
@@ -50,27 +50,27 @@ defmodule Kernel.LexicalTracker do
@doc false
def add_import(pid, module, line, warn) do
- :gen_server.cast(pid, { :add_import, module, line, warn })
+ :gen_server.cast(pid, {:add_import, module, line, warn})
end
@doc false
def add_alias(pid, module, line, warn) do
- :gen_server.cast(pid, { :add_alias, module, line, warn })
+ :gen_server.cast(pid, {:add_alias, module, line, warn})
end
@doc false
def remote_dispatch(pid, module) do
- :gen_server.cast(pid, { :remote_dispatch, module })
+ :gen_server.cast(pid, {:remote_dispatch, module})
end
@doc false
def import_dispatch(pid, module) do
- :gen_server.cast(pid, { :import_dispatch, module })
+ :gen_server.cast(pid, {:import_dispatch, module})
end
@doc false
def alias_dispatch(pid, module) do
- :gen_server.cast(pid, { :alias_dispatch, module })
+ :gen_server.cast(pid, {:alias_dispatch, module})
end
@doc false
@@ -86,8 +86,8 @@ defmodule Kernel.LexicalTracker do
defp unused(pid, pos) do
ets = :gen_server.call(pid, :ets, @timeout)
:ets.foldl(fn
- { module, _, _ } = tuple, acc when is_integer(:erlang.element(pos, tuple)) ->
- [{ module, :erlang.element(pos, tuple) }|acc]
+ {module, _, _} = tuple, acc when is_integer(:erlang.element(pos, tuple)) ->
+ [{module, :erlang.element(pos, tuple)}|acc]
_, acc ->
acc
end, [], ets) |> Enum.sort
@@ -97,52 +97,52 @@ defmodule Kernel.LexicalTracker do
def init([]) do
- { :ok, :ets.new(:lexical, [:protected]) }
+ {:ok, :ets.new(:lexical, [:protected])}
end
def handle_call(:ets, _from, d) do
- { :reply, d, d }
+ {:reply, d, d}
end
def handle_call(request, _from, d) do
- { :stop, { :bad_call, request }, d }
+ {:stop, {:bad_call, request}, d}
end
- def handle_cast({ :remote_dispatch, module }, d) do
+ def handle_cast({:remote_dispatch, module}, d) do
add_module(d, module)
- { :noreply, d }
+ {:noreply, d}
end
- def handle_cast({ :import_dispatch, module }, d) do
+ def handle_cast({:import_dispatch, module}, d) do
add_dispatch(d, module, @import)
- { :noreply, d }
+ {:noreply, d}
end
- def handle_cast({ :alias_dispatch, module }, d) do
+ def handle_cast({:alias_dispatch, module}, d) do
add_dispatch(d, module, @alias)
- { :noreply, d }
+ {:noreply, d}
end
- def handle_cast({ :add_import, module, line, warn }, d) do
+ def handle_cast({:add_import, module, line, warn}, d) do
add_directive(d, module, line, warn, @import)
- { :noreply, d }
+ {:noreply, d}
end
- def handle_cast({ :add_alias, module, line, warn }, d) do
+ def handle_cast({:add_alias, module, line, warn}, d) do
add_directive(d, module, line, warn, @alias)
- { :noreply, d }
+ {:noreply, d}
end
def handle_cast(:stop, d) do
- { :stop, :normal, d }
+ {:stop, :normal, d}
end
def handle_cast(msg, d) do
- { :stop, { :bad_cast, msg }, d }
+ {:stop, {:bad_cast, msg}, d}
end
def handle_info(_msg, d) do
- { :noreply, d }
+ {:noreply, d}
end
def terminate(_reason, _d) do
@@ -150,7 +150,7 @@ defmodule Kernel.LexicalTracker do
end
def code_change(_old, d, _extra) do
- { :ok, d }
+ {:ok, d}
end
# Callbacks helpers
@@ -160,16 +160,16 @@ defmodule Kernel.LexicalTracker do
# If the value is true, it was imported/aliased
# If the value is a line, it was imported/aliased and has a pending warning
defp add_module(d, module) do
- :ets.insert_new(d, { module, false, false })
+ :ets.insert_new(d, {module, false, false})
end
defp add_dispatch(d, module, pos) do
- :ets.update_element(d, module, { pos, true })
+ :ets.update_element(d, module, {pos, true})
end
defp add_directive(d, module, line, warn, pos) do
add_module(d, module)
marker = if warn, do: line, else: true
- :ets.update_element(d, module, { pos, marker })
+ :ets.update_element(d, module, {pos, marker})
end
end
diff --git a/lib/elixir/lib/kernel/parallel_compiler.ex b/lib/elixir/lib/kernel/parallel_compiler.ex
index 96599d1a1..f732ead02 100644
--- a/lib/elixir/lib/kernel/parallel_compiler.ex
+++ b/lib/elixir/lib/kernel/parallel_compiler.ex
@@ -43,14 +43,14 @@ defmodule Kernel.ParallelCompiler do
defp spawn_compilers(files, path, callbacks) do
Code.ensure_loaded(Kernel.ErrorHandler)
compiler_pid = self()
- :elixir_code_server.cast({ :reset_warnings, compiler_pid })
+ :elixir_code_server.cast({:reset_warnings, compiler_pid})
schedulers = max(:erlang.system_info(:schedulers_online), 2)
result = spawn_compilers(files, files, path, callbacks, [], [], schedulers, [])
# In case --warning-as-errors is enabled and there was a warning,
# compilation status will be set to error and we fail with CompileError
- case :elixir_code_server.call({ :compilation_status, compiler_pid }) do
+ case :elixir_code_server.call({:compilation_status, compiler_pid}) do
:ok -> result
:error -> raise CompileError, [], []
end
@@ -64,8 +64,8 @@ defmodule Kernel.ParallelCompiler do
# Release waiting processes
defp spawn_compilers([h|t], original, output, callbacks, waiting, queued, schedulers, result) when is_pid(h) do
- { _kind, ^h, ref, _module } = List.keyfind(waiting, h, 1)
- send h, { ref, :ready }
+ {_kind, ^h, ref, _module} = List.keyfind(waiting, h, 1)
+ send h, {ref, :ready}
waiting = List.keydelete(waiting, h, 1)
spawn_compilers(t, original, output, callbacks, waiting, queued, schedulers, result)
end
@@ -74,7 +74,7 @@ defmodule Kernel.ParallelCompiler do
defp spawn_compilers([h|t], original, output, callbacks, waiting, queued, schedulers, result) do
parent = self()
- { pid, ref } =
+ {pid, ref} =
:erlang.spawn_monitor fn ->
# Notify Code.ensure_compiled/2 that we should
# attempt to compile the module by doing a dispatch.
@@ -90,27 +90,27 @@ defmodule Kernel.ParallelCompiler do
else
:elixir_compiler.file(h)
end
- { :compiled, h }
+ {:compiled, h}
catch
kind, reason ->
- { :failure, kind, reason, System.stacktrace }
+ {:failure, kind, reason, System.stacktrace}
end)
end
spawn_compilers(t, original, output, callbacks, waiting,
- [{ pid, ref, h }|queued], schedulers, result)
+ [{pid, ref, h}|queued], schedulers, result)
end
# No more files, nothing waiting, queue is empty, we are done
defp spawn_compilers([], _original, _output, _callbacks, [], [], _schedulers, result) do
- for { :module, mod } <- result, do: mod
+ for {:module, mod} <- result, do: mod
end
# Queued x, waiting for x: POSSIBLE ERROR! Release processes so we get the failures
defp spawn_compilers([], original, output, callbacks, waiting, queued, schedulers, result) when length(waiting) == length(queued) do
- Enum.each queued, fn { child, _, _ } ->
- { _kind, ^child, ref, _module } = List.keyfind(waiting, child, 1)
- send child, { ref, :release }
+ Enum.each queued, fn {child, _, _} ->
+ {_kind, ^child, ref, _module} = List.keyfind(waiting, child, 1)
+ send child, {ref, :release}
end
wait_for_messages([], original, output, callbacks, waiting, queued, schedulers, result)
end
@@ -123,44 +123,44 @@ defmodule Kernel.ParallelCompiler do
# Wait for messages from child processes
defp wait_for_messages(entries, original, output, callbacks, waiting, queued, schedulers, result) do
receive do
- { :struct_available, module } ->
- available = for { :struct, pid, _, waiting_module } <- waiting,
+ {:struct_available, module} ->
+ available = for {:struct, pid, _, waiting_module} <- waiting,
module == waiting_module,
not pid in entries,
do: pid
spawn_compilers(available ++ entries, original, output, callbacks,
- waiting, queued, schedulers, [{ :struct, module }|result])
+ waiting, queued, schedulers, [{:struct, module}|result])
- { :module_available, child, ref, file, module, binary } ->
+ {:module_available, child, ref, file, module, binary} ->
if callback = Keyword.get(callbacks, :each_module) do
callback.(file, module, binary)
end
# Release the module loader which is waiting for an ack
- send child, { ref, :ack }
+ send child, {ref, :ack}
- available = for { _kind, pid, _, waiting_module } <- waiting,
+ available = for {_kind, pid, _, waiting_module} <- waiting,
module == waiting_module,
not pid in entries,
do: pid
spawn_compilers(available ++ entries, original, output, callbacks,
- waiting, queued, schedulers, [{ :module, module }|result])
+ waiting, queued, schedulers, [{:module, module}|result])
- { :waiting, kind, child, ref, on } ->
- defined = fn { k, m } -> on == m and k in [kind, :module] end
+ {:waiting, kind, child, ref, on} ->
+ defined = fn {k, m} -> on == m and k in [kind, :module] end
# Oops, we already got it, do not put it on waiting.
if :lists.any(defined, result) do
- send child, { ref, :ready }
+ send child, {ref, :ready}
else
- waiting = [{ kind, child, ref, on }|waiting]
+ waiting = [{kind, child, ref, on}|waiting]
end
spawn_compilers(entries, original, output, callbacks, waiting, queued, schedulers, result)
- { :DOWN, _down_ref, :process, down_pid, { :compiled, file } } ->
+ {:DOWN, _down_ref, :process, down_pid, {:compiled, file}} ->
if callback = Keyword.get(callbacks, :each_file) do
callback.(file)
end
@@ -172,11 +172,11 @@ defmodule Kernel.ParallelCompiler do
new_waiting = List.keydelete(waiting, down_pid, 1)
spawn_compilers(new_entries, original, output, callbacks, new_waiting, new_queued, schedulers, result)
- { :DOWN, down_ref, :process, _down_pid, { :failure, kind, reason, stacktrace } } ->
+ {:DOWN, down_ref, :process, _down_pid, {:failure, kind, reason, stacktrace}} ->
handle_failure(down_ref, kind, reason, stacktrace, entries, waiting, queued)
wait_for_messages(entries, original, output, callbacks, waiting, queued, schedulers, result)
- { :DOWN, down_ref, :process, _down_pid, other } ->
+ {:DOWN, down_ref, :process, _down_pid, other} ->
handle_failure(down_ref, :exit, other, [], entries, waiting, queued)
wait_for_messages(entries, original, output, callbacks, waiting, queued, schedulers, result)
end
@@ -184,14 +184,14 @@ defmodule Kernel.ParallelCompiler do
defp handle_failure(ref, kind, reason, stacktrace, entries, waiting, queued) do
case List.keyfind(queued, ref, 1) do
- { child, ^ref, file } ->
+ {child, ^ref, file} ->
if many_missing?(child, entries, waiting, queued) do
IO.puts "== Compilation failed =="
IO.puts "Compilation failed on the following files:\n"
- Enum.each Enum.reverse(queued), fn { pid, _ref, file } ->
+ Enum.each Enum.reverse(queued), fn {pid, _ref, file} ->
case List.keyfind(waiting, pid, 1) do
- { _, ^pid, _, mod } -> IO.puts "* #{file} is missing module #{inspect mod}"
+ {_, ^pid, _, mod} -> IO.puts "* #{file} is missing module #{inspect mod}"
_ -> :ok
end
end
@@ -209,7 +209,7 @@ defmodule Kernel.ParallelCompiler do
defp many_missing?(child, entries, waiting, queued) do
waiting_length = length(waiting)
- match?({ _, ^child, _, _ }, List.keyfind(waiting, child, 1)) and
+ match?({_, ^child, _, _}, List.keyfind(waiting, child, 1)) and
waiting_length > 1 and entries == [] and
waiting_length == length(queued)
end
diff --git a/lib/elixir/lib/kernel/parallel_require.ex b/lib/elixir/lib/kernel/parallel_require.ex
index 48e6e4de4..a9132618b 100644
--- a/lib/elixir/lib/kernel/parallel_require.ex
+++ b/lib/elixir/lib/kernel/parallel_require.ex
@@ -33,9 +33,9 @@ defmodule Kernel.ParallelRequire do
compiler_pid = :erlang.get(:elixir_compiler_pid)
ensure_compiled = :erlang.get(:elixir_ensure_compiled)
- { :error_handler, handler } = :erlang.process_info(parent, :error_handler)
+ {:error_handler, handler} = :erlang.process_info(parent, :error_handler)
- { pid, ref } = :erlang.spawn_monitor fn ->
+ {pid, ref} = :erlang.spawn_monitor fn ->
if compiler_pid != :undefined do
:erlang.put(:elixir_compiler_pid, compiler_pid)
end
@@ -48,27 +48,27 @@ defmodule Kernel.ParallelRequire do
exit(try do
new = Code.require_file(h) || []
- { :required, Enum.map(new, &elem(&1, 0)), h }
+ {:required, Enum.map(new, &elem(&1, 0)), h}
catch
kind, reason ->
- { :failure, kind, reason, System.stacktrace }
+ {:failure, kind, reason, System.stacktrace}
end)
end
- spawn_requires(t, [{ pid, ref }|waiting], callback, schedulers, result)
+ spawn_requires(t, [{pid, ref}|waiting], callback, schedulers, result)
end
defp wait_for_messages(files, waiting, callback, schedulers, result) do
receive do
- { :DOWN, ref, :process, pid, status } ->
- tuple = { pid, ref }
+ {:DOWN, ref, :process, pid, status} ->
+ tuple = {pid, ref}
if tuple in waiting do
case status do
- { :required, mods, file } ->
+ {:required, mods, file} ->
callback.(file)
result = mods ++ result
waiting = List.delete(waiting, tuple)
- { :failure, kind, reason, stacktrace } ->
+ {:failure, kind, reason, stacktrace} ->
:erlang.raise(kind, reason, stacktrace)
other ->
:erlang.raise(:exit, other, [])
diff --git a/lib/elixir/lib/kernel/special_forms.ex b/lib/elixir/lib/kernel/special_forms.ex
index 7976da959..e32787ad5 100644
--- a/lib/elixir/lib/kernel/special_forms.ex
+++ b/lib/elixir/lib/kernel/special_forms.ex
@@ -33,11 +33,11 @@ defmodule Kernel.SpecialForms do
## Examples
- iex> { 1, 2, 3 }
- { 1, 2, 3 }
+ iex> {1, 2, 3}
+ {1, 2, 3}
- iex> quote do: { 1, 2, 3 }
- { :{}, [], [1,2,3] }
+ iex> quote do: {1, 2, 3}
+ {:{}, [], [1,2,3]}
"""
defmacro unquote(:{})(args)
@@ -49,19 +49,19 @@ defmodule Kernel.SpecialForms do
using the match operator (`===`). Maps can be created with
the `%{}` special form where keys are associated via `=>`:
- %{ 1 => 2 }
+ %{1 => 2}
Maps also support the keyword notation, as other special forms,
as long as they are at the end of the argument list:
- %{ hello: :world, with: :keywords }
- %{ :hello => :world, with: :keywords }
+ %{hello: :world, with: :keywords}
+ %{:hello => :world, with: :keywords}
If a map has duplicated keys, the last key will always have
higher precedence:
- iex> %{ a: :b, a: :c }
- %{ a: :c }
+ iex> %{a: :b, a: :c}
+ %{a: :c}
Conveniences for manipulating maps can be found in the
`Map` module.
@@ -72,7 +72,7 @@ defmodule Kernel.SpecialForms do
like `Map.get/3` and `Map.fetch/2`, a map can be accessed using the
`.` operator:
- iex> map = %{ a: :b }
+ iex> map = %{a: :b}
iex> map.a
:b
@@ -83,9 +83,9 @@ defmodule Kernel.SpecialForms do
Maps also support an update syntax:
- iex> map = %{ :a => :b }
- iex> %{ map | :a => :c }
- %{ :a => :c }
+ iex> map = %{:a => :b}
+ iex> %{map | :a => :c}
+ %{:a => :c}
Notice the update syntax requires the given keys to exist.
Trying to update a key that does not exist will raise an `ArgumentError`.
@@ -96,8 +96,8 @@ defmodule Kernel.SpecialForms do
always represented internally as a list of two-items tuples
for simplicity:
- iex> quote do: %{ :a => :b, c: :d }
- { :%{}, [], [{:a, :b}, {:c, :d}] }
+ iex> quote do: %{:a => :b, c: :d}
+ {:%{}, [], [{:a, :b}, {:c, :d}]}
"""
defmacro unquote(:%{})(args)
@@ -114,7 +114,7 @@ defmodule Kernel.SpecialForms do
defmodule User do
def __struct__ do
- %{ name: "josé", age: 27 }
+ %{name: "josé", age: 27}
end
end
@@ -125,13 +125,13 @@ defmodule Kernel.SpecialForms do
Underneath, a struct is just a map with a `__struct__` field
pointing to the User module:
- %User{} == %{ __struct__: User, name: "josé", age: 27 }
+ %User{} == %{__struct__: User, name: "josé", age: 27}
A struct also validates the given keys are part of the defined
struct. The example below will fail because there is no key
`:full_name` in the user struct:
- %User{ full_name: "José Valim" }
+ %User{full_name: "José Valim"}
Note that a struct specifies a minimum set of keys required
for operations. Other keys can be added to structs via the
@@ -142,7 +142,7 @@ defmodule Kernel.SpecialForms do
An update operation specific for structs is also available:
- %User{ user | age: 28 }
+ %User{user | age: 28}
The syntax above will guarantee the given keys are valid at
compilation time and it will guarantee at runtime the given
@@ -627,7 +627,7 @@ defmodule Kernel.SpecialForms do
following example will match:
iex> x = 0
- iex> { x, ^x } = { 1, 0 }
+ iex> {x, ^x} = {1, 0}
iex> x
1
@@ -640,7 +640,7 @@ defmodule Kernel.SpecialForms do
## Examples
quote do: sum(1, 2, 3)
- #=> { :sum, [], [1, 2, 3] }
+ #=> {:sum, [], [1, 2, 3]}
## Explanation
@@ -648,7 +648,7 @@ defmodule Kernel.SpecialForms do
The building block of Elixir macros is a tuple with three elements,
for example:
- { :sum, [], [1, 2, 3] }
+ {:sum, [], [1, 2, 3]}
The tuple above represents a function call to `sum` passing 1, 2 and
3 as arguments. The tuple elements are:
@@ -1025,7 +1025,7 @@ defmodule Kernel.SpecialForms do
functions on the fly. Consider this example:
kv = [foo: 1, bar: 2]
- Enum.each kv, fn { k, v } ->
+ Enum.each kv, fn {k, v} ->
def unquote(k)(), do: unquote(v)
end
@@ -1034,7 +1034,7 @@ defmodule Kernel.SpecialForms do
functionality into a macro:
defmacro defkv(kv) do
- Enum.map kv, fn { k, v } ->
+ Enum.map kv, fn {k, v} ->
quote do
def unquote(k)(), do: unquote(v)
end
@@ -1061,7 +1061,7 @@ defmodule Kernel.SpecialForms do
defmacro defkv(kv) do
quote do
- Enum.each unquote(kv), fn { k, v } ->
+ Enum.each unquote(kv), fn {k, v} ->
def unquote(k)(), do: unquote(v)
end
end
@@ -1082,7 +1082,7 @@ defmodule Kernel.SpecialForms do
defmacro defkv(kv) do
quote bind_quoted: [kv: kv] do
- Enum.each kv, fn { k, v } ->
+ Enum.each kv, fn {k, v} ->
def unquote(k)(), do: unquote(v)
end
end
@@ -1107,13 +1107,13 @@ defmodule Kernel.SpecialForms do
Which would then return:
- { :sum, [], [1, { :value, [], quoted }, 3] }
+ {:sum, [], [1, {:value, [], quoted}, 3]}
Which is not the expected result. For this, we use unquote:
value = 13
quote do: sum(1, unquote(value), 3)
- #=> { :sum, [], [1, 13, 3] }
+ #=> {:sum, [], [1, 13, 3]}
"""
defmacro unquote(:unquote)(expr)
@@ -1126,7 +1126,7 @@ defmodule Kernel.SpecialForms do
values = [2, 3, 4]
quote do: sum(1, unquote_splicing(values), 5)
- #=> { :sum, [], [1, 2, 3, 4, 5] }
+ #=> {:sum, [], [1, 2, 3, 4, 5]}
"""
defmacro unquote(:unquote_splicing)(expr)
@@ -1211,7 +1211,7 @@ defmodule Kernel.SpecialForms do
and should not be invoked directly:
iex> quote do: (1; 2; 3)
- { :__block__, [], [1, 2, 3] }
+ {:__block__, [], [1, 2, 3]}
"""
defmacro __block__(args)
@@ -1266,7 +1266,7 @@ defmodule Kernel.SpecialForms do
iex> fun = &{&1, &2}
iex> fun.(1, 2)
- { 1, 2 }
+ {1, 2}
iex> fun = &[&1|&2]
iex> fun.(1, 2)
@@ -1336,7 +1336,7 @@ defmodule Kernel.SpecialForms do
## Examples
case thing do
- { :selector, i, value } when is_integer(i) ->
+ {:selector, i, value} when is_integer(i) ->
value
value ->
value
@@ -1352,7 +1352,7 @@ defmodule Kernel.SpecialForms do
outer context:
case data do
- { :ok, value } -> value
+ {:ok, value} -> value
:error -> nil
end
@@ -1608,7 +1608,7 @@ defmodule Kernel.SpecialForms do
## Examples
receive do
- { :selector, i, value } when is_integer(i) ->
+ {:selector, i, value} when is_integer(i) ->
value
value when is_atom(value) ->
value
@@ -1620,7 +1620,7 @@ defmodule Kernel.SpecialForms do
received after the specified period of time:
receive do
- { :selector, i, value } when is_integer(i) ->
+ {:selector, i, value} when is_integer(i) ->
value
value when is_atom(value) ->
value
diff --git a/lib/elixir/lib/kernel/typespec.ex b/lib/elixir/lib/kernel/typespec.ex
index 47d614120..63176b35e 100644
--- a/lib/elixir/lib/kernel/typespec.ex
+++ b/lib/elixir/lib/kernel/typespec.ex
@@ -69,7 +69,7 @@ defmodule Kernel.Typespec do
Tuple :: tuple # a tuple of any size
| {} # empty tuple
- | { TList }
+ | {TList}
TList :: Type
| Type, TList
@@ -119,7 +119,7 @@ defmodule Kernel.Typespec do
`iodata` | `iolist` &#124; `binary`
`iolist` | `maybe_improper_list(byte` &#124; `binary` &#124; `iolist, binary` &#124; `[])`
`module` | `atom`
- `mfa` | `{ atom, atom, arity }`
+ `mfa` | `{atom, atom, arity}`
`arity` | `0..255`
`node` | `atom`
`timeout` | `:infinity` &#124; `non_neg_integer`
@@ -151,7 +151,7 @@ defmodule Kernel.Typespec do
Types can be parametrised by defining variables as parameters, these variables
can then be used to define the type.
- @type dict(key, value) :: [{ key, value }]
+ @type dict(key, value) :: [{key, value}]
Types can also be defined for records, see `defrecord/3`.
@@ -269,12 +269,12 @@ defmodule Kernel.Typespec do
@doc """
Defines a `type`, `typep` or `opaque` by receiving Erlang's typespec.
"""
- def define_type(caller, kind, { name, _, vars } = type) when kind in [:type, :typep, :opaque] do
- { kind, export } =
+ def define_type(caller, kind, {name, _, vars} = type) when kind in [:type, :typep, :opaque] do
+ {kind, export} =
case kind do
- :type -> { :type, true }
- :typep -> { :type, false }
- :opaque -> { :opaque, true }
+ :type -> {:type, true}
+ :typep -> {:type, false}
+ :opaque -> {:opaque, true}
end
module = caller.module
@@ -283,7 +283,7 @@ defmodule Kernel.Typespec do
Module.compile_typespec module, kind, type
if export do
- Module.compile_typespec(module, :export_type, [{ name, arity }])
+ Module.compile_typespec(module, :export_type, [{name, arity}])
end
define_doc(caller, kind, name, arity, export)
@@ -296,7 +296,7 @@ defmodule Kernel.Typespec do
if doc do
if export do
- Module.add_doc(module, caller.line, kind, { name, arity }, doc)
+ Module.add_doc(module, caller.line, kind, {name, arity}, doc)
else
:elixir_errors.warn caller.line, caller.file, "type #{name}/#{arity} is private, " <>
"@typedoc's are always discarded for private types\n"
@@ -310,14 +310,14 @@ defmodule Kernel.Typespec do
Defines a `spec` by receiving Erlang's typespec.
"""
def define_spec(module, tuple, definition) do
- Module.compile_typespec module, :spec, { tuple, definition }
+ Module.compile_typespec module, :spec, {tuple, definition}
end
@doc """
Defines a `callback` by receiving Erlang's typespec.
"""
def define_callback(module, tuple, definition) do
- Module.compile_typespec module, :callback, { tuple, definition }
+ Module.compile_typespec module, :callback, {tuple, definition}
end
@doc """
@@ -326,7 +326,7 @@ defmodule Kernel.Typespec do
for modules being compiled.
"""
def defines_type?(module, name, arity) do
- finder = &match?({ ^name, _, vars } when length(vars) == arity, &1)
+ finder = &match?({^name, _, vars} when length(vars) == arity, &1)
Enum.any?(Module.get_attribute(module, :type), finder) or
Enum.any?(Module.get_attribute(module, :opaque), finder)
end
@@ -336,7 +336,7 @@ defmodule Kernel.Typespec do
This function is only available for modules being compiled.
"""
def defines_spec?(module, name, arity) do
- tuple = { name, arity }
+ tuple = {name, arity}
Enum.any?(Module.get_attribute(module, :spec), &match?(^tuple, &1))
end
@@ -345,39 +345,39 @@ defmodule Kernel.Typespec do
This function is only available for modules being compiled.
"""
def defines_callback?(module, name, arity) do
- tuple = { name, arity }
+ tuple = {name, arity}
Enum.any?(Module.get_attribute(module, :callback), &match?(^tuple, &1))
end
@doc """
Converts a spec clause back to Elixir AST.
"""
- def spec_to_ast(name, { :type, line, :fun, [{:type, _, :product, args}, result] }) do
+ def spec_to_ast(name, {:type, line, :fun, [{:type, _, :product, args}, result]}) do
meta = [line: line]
- body = { name, meta, Enum.map(args, &typespec_to_ast/1) }
+ body = {name, meta, Enum.map(args, &typespec_to_ast/1)}
vars = args ++ [result]
|> Enum.flat_map(&collect_vars/1)
|> Enum.uniq
- |> Enum.map(&{ &1, { :var, meta, nil } })
+ |> Enum.map(&{&1, {:var, meta, nil}})
- spec = { :::, meta, [body, typespec_to_ast(result)] }
+ spec = {:::, meta, [body, typespec_to_ast(result)]}
if vars == [] do
spec
else
- { :when, meta, [spec, vars] }
+ {:when, meta, [spec, vars]}
end
end
- def spec_to_ast(name, { :type, line, :fun, [] }) do
- { :::, [line: line], [{ name, [line: line], [] }, quote(do: term)] }
+ def spec_to_ast(name, {:type, line, :fun, []}) do
+ {:::, [line: line], [{name, [line: line], []}, quote(do: term)]}
end
- def spec_to_ast(name, { :type, line, :bounded_fun, [{ :type, _, :fun, [{ :type, _, :product, args }, result] }, constraints] }) do
+ def spec_to_ast(name, {:type, line, :bounded_fun, [{:type, _, :fun, [{:type, _, :product, args}, result]}, constraints]}) do
guards =
- for { :type, _, :constraint, [{ :atom, _, :is_subtype }, [{ :var, _, var }, type]] } <- constraints do
- { var, typespec_to_ast(type) }
+ for {:type, _, :constraint, [{:atom, _, :is_subtype}, [{:var, _, var}, type]]} <- constraints do
+ {var, typespec_to_ast(type)}
end
meta = [line: line]
@@ -386,27 +386,27 @@ defmodule Kernel.Typespec do
|> Enum.flat_map(&collect_vars/1)
|> Enum.uniq
|> Kernel.--(Keyword.keys(guards))
- |> Enum.map(&{ &1, { :var, meta, nil } })
+ |> Enum.map(&{&1, {:var, meta, nil}})
args = for arg <- args, do: typespec_to_ast(arg)
- { :when, meta, [
- { :::, meta, [{ name, [line: line], args }, typespec_to_ast(result)] },
+ {:when, meta, [
+ {:::, meta, [{name, [line: line], args}, typespec_to_ast(result)]},
guards ++ vars
- ] }
+ ]}
end
@doc """
Converts a type clause back to Elixir AST.
"""
- def type_to_ast({ { :record, record }, fields, args }) when is_atom(record) do
+ def type_to_ast({{:record, record}, fields, args}) when is_atom(record) do
fields = for field <- fields, do: typespec_to_ast(field)
args = for arg <- args, do: typespec_to_ast(arg)
- type = { :{}, [], [record|fields] }
+ type = {:{}, [], [record|fields]}
quote do: unquote(record)(unquote_splicing(args)) :: unquote(type)
end
- def type_to_ast({ name, type, args }) do
+ def type_to_ast({name, type, args}) do
args = for arg <- args, do: typespec_to_ast(arg)
quote do: unquote(name)(unquote_splicing(args)) :: unquote(typespec_to_ast(type))
end
@@ -423,8 +423,8 @@ defmodule Kernel.Typespec do
@spec beam_typedocs(module | binary) :: [tuple] | nil
def beam_typedocs(module) when is_atom(module) or is_binary(module) do
case abstract_code(module) do
- { :ok, abstract_code } ->
- type_docs = for { :attribute, _, :typedoc, tup } <- abstract_code, do: tup
+ {:ok, abstract_code} ->
+ type_docs = for {:attribute, _, :typedoc, tup} <- abstract_code, do: tup
List.flatten(type_docs)
_ ->
nil
@@ -443,15 +443,15 @@ defmodule Kernel.Typespec do
@spec beam_types(module | binary) :: [tuple] | nil
def beam_types(module) when is_atom(module) or is_binary(module) do
case abstract_code(module) do
- { :ok, abstract_code } ->
- exported_types = for { :attribute, _, :export_type, types } <- abstract_code, do: types
+ {:ok, abstract_code} ->
+ exported_types = for {:attribute, _, :export_type, types} <- abstract_code, do: types
exported_types = List.flatten(exported_types)
- for { :attribute, _, kind, { name, _, args } = type } <- abstract_code, kind in [:opaque, :type] do
+ for {:attribute, _, kind, {name, _, args} = type} <- abstract_code, kind in [:opaque, :type] do
cond do
- kind == :opaque -> { :opaque, type }
- { name, length(args) } in exported_types -> { :type, type }
- true -> { :typep, type }
+ kind == :opaque -> {:opaque, type}
+ {name, length(args)} in exported_types -> {:type, type}
+ true -> {:typep, type}
end
end
_ ->
@@ -489,8 +489,8 @@ defmodule Kernel.Typespec do
defp from_abstract_code(module, kind) do
case abstract_code(module) do
- { :ok, abstract_code } ->
- for { :attribute, _, abs_kind, value } <- abstract_code, kind == abs_kind, do: value
+ {:ok, abstract_code} ->
+ for {:attribute, _, abs_kind, value} <- abstract_code, kind == abs_kind, do: value
:error ->
nil
end
@@ -498,8 +498,8 @@ defmodule Kernel.Typespec do
defp abstract_code(module) do
case :beam_lib.chunks(abstract_code_beam(module), [:abstract_code]) do
- { :ok, { _, [{ :abstract_code, { _raw_abstract_v1, abstract_code } }] } } ->
- { :ok, abstract_code }
+ {:ok, {_, [{:abstract_code, {_raw_abstract_v1, abstract_code}}]}} ->
+ {:ok, abstract_code}
_ ->
:error
end
@@ -507,7 +507,7 @@ defmodule Kernel.Typespec do
defp abstract_code_beam(module) when is_atom(module) do
case :code.get_object_code(module) do
- { ^module, beam, _filename } -> beam
+ {^module, beam, _filename} -> beam
:error -> module
end
end
@@ -519,7 +519,7 @@ defmodule Kernel.Typespec do
## Macro callbacks
@doc false
- def deftype(kind, { :::, _, [{ name, _, args }, definition] }, caller) when is_atom(name) and name != ::: do
+ def deftype(kind, {:::, _, [{name, _, args}, definition]}, caller) when is_atom(name) and name != ::: do
args =
if is_atom(args) do
[]
@@ -527,11 +527,11 @@ defmodule Kernel.Typespec do
for(arg <- args, do: variable(arg))
end
- vars = for { :var, _, var } <- args, do: var
+ vars = for {:var, _, var} <- args, do: var
spec = typespec(definition, vars, caller)
- vars = for { :var, _, _ } = var <- args, do: var
- type = { name, spec, vars }
+ vars = for {:var, _, _} = var <- args, do: var
+ type = {name, spec, vars}
define_type(caller, kind, type)
end
@@ -543,7 +543,7 @@ defmodule Kernel.Typespec do
@doc false
- def defspec(type, { :when, _meta, [spec, guard] }, caller) do
+ def defspec(type, {:when, _meta, [spec, guard]}, caller) do
defspec(type, spec, guard, caller)
end
@@ -551,7 +551,7 @@ defmodule Kernel.Typespec do
defspec(type, spec, [], caller)
end
- defp defspec(type, { :::, meta, [{ name, _, args }, return] }, guard, caller) when is_atom(name) and name != ::: do
+ defp defspec(type, {:::, meta, [{name, _, args}, return]}, guard, caller) when is_atom(name) and name != ::: do
if is_atom(args), do: args = []
unless Keyword.keyword?(guard) do
@@ -562,12 +562,12 @@ defmodule Kernel.Typespec do
vars = Keyword.keys(guard)
constraints = guard_to_constraints(guard, vars, meta, caller)
- spec = { :type, line(meta), :fun, fn_args(meta, args, return, vars, caller) }
+ spec = {:type, line(meta), :fun, fn_args(meta, args, return, vars, caller)}
if constraints != [] do
- spec = { :type, line(meta), :bounded_fun, [spec, constraints] }
+ spec = {:type, line(meta), :bounded_fun, [spec, constraints]}
end
- code = { { name, Kernel.length(args) }, spec }
+ code = {{name, Kernel.length(args)}, spec}
Module.compile_typespec(caller.module, type, code)
code
end
@@ -581,38 +581,38 @@ defmodule Kernel.Typespec do
line = line(meta)
Enum.reduce(guard, [], fn
- { _name, { :var, _, context } }, acc when is_atom(context) ->
+ {_name, {:var, _, context}}, acc when is_atom(context) ->
acc
- { name, type }, acc ->
- constraint = [{ :atom, line, :is_subtype }, [{:var, line, name}, typespec(type, vars, caller)]]
- type = { :type, line, :constraint, constraint }
+ {name, type}, acc ->
+ constraint = [{:atom, line, :is_subtype}, [{:var, line, name}, typespec(type, vars, caller)]]
+ type = {:type, line, :constraint, constraint}
[type|acc]
end) |> Enum.reverse
end
## To AST conversion
- defp collect_vars({ :ann_type, _line, args }) when is_list(args) do
+ defp collect_vars({:ann_type, _line, args}) when is_list(args) do
[]
end
- defp collect_vars({ :type, _line, _kind, args }) when is_list(args) do
+ defp collect_vars({:type, _line, _kind, args}) when is_list(args) do
Enum.flat_map(args, &collect_vars/1)
end
- defp collect_vars({ :remote_type, _line, args }) when is_list(args) do
+ defp collect_vars({:remote_type, _line, args}) when is_list(args) do
Enum.flat_map(args, &collect_vars/1)
end
- defp collect_vars({ :typed_record_field, _line, type }) do
+ defp collect_vars({:typed_record_field, _line, type}) do
collect_vars(type)
end
- defp collect_vars({ :paren_type, _line, [type] }) do
+ defp collect_vars({:paren_type, _line, [type]}) do
collect_vars(type)
end
- defp collect_vars({ :var, _line, var }) do
+ defp collect_vars({:var, _line, var}) do
[erl_to_ex_var(var)]
end
@@ -620,42 +620,42 @@ defmodule Kernel.Typespec do
[]
end
- defp typespec_to_ast({ :type, line, :tuple, :any }) do
- { :tuple, [line: line], [] }
+ defp typespec_to_ast({:type, line, :tuple, :any}) do
+ {:tuple, [line: line], []}
end
- defp typespec_to_ast({ :type, line, :tuple, args }) do
+ defp typespec_to_ast({:type, line, :tuple, args}) do
args = for arg <- args, do: typespec_to_ast(arg)
- { :{}, [line: line], args }
+ {:{}, [line: line], args}
end
- defp typespec_to_ast({ :type, _line, :list, [{ :type, _, :union, unions } = arg] }) do
+ defp typespec_to_ast({:type, _line, :list, [{:type, _, :union, unions} = arg]}) do
case unpack_typespec_kw(unions, []) do
- { :ok, ast } -> ast
+ {:ok, ast} -> ast
:error -> [typespec_to_ast(arg)]
end
end
- defp typespec_to_ast({ :type, _line, :list, args }) do
+ defp typespec_to_ast({:type, _line, :list, args}) do
for arg <- args, do: typespec_to_ast(arg)
end
- defp typespec_to_ast({ :type, line, :map, fields }) do
- fields = Enum.map fields, fn { :type, _, :map_field_assoc, k, v } ->
- { typespec_to_ast(k), typespec_to_ast(v) }
+ defp typespec_to_ast({:type, line, :map, fields}) do
+ fields = Enum.map fields, fn {:type, _, :map_field_assoc, k, v} ->
+ {typespec_to_ast(k), typespec_to_ast(v)}
end
- { struct, fields } = Keyword.pop(fields, :__struct__)
- map = { :%{}, [line: line], fields }
+ {struct, fields} = Keyword.pop(fields, :__struct__)
+ map = {:%{}, [line: line], fields}
if struct do
- { :%, [line: line], [struct, map] }
+ {:%, [line: line], [struct, map]}
else
map
end
end
- defp typespec_to_ast({ :type, line, :binary, [arg1, arg2] }) do
+ defp typespec_to_ast({:type, line, :binary, [arg1, arg2]}) do
[arg1, arg2] = for arg <- [arg1, arg2], do: typespec_to_ast(arg)
cond do
arg2 == 0 ->
@@ -667,79 +667,79 @@ defmodule Kernel.Typespec do
end
end
- defp typespec_to_ast({ :type, line, :union, args }) do
+ defp typespec_to_ast({:type, line, :union, args}) do
args = for arg <- args, do: typespec_to_ast(arg)
- Enum.reduce Enum.reverse(args), fn(arg, expr) -> { :|, [line: line], [arg, expr] } end
+ Enum.reduce Enum.reverse(args), fn(arg, expr) -> {:|, [line: line], [arg, expr]} end
end
- defp typespec_to_ast({ :type, line, :fun, [{:type, _, :product, args}, result] }) do
+ defp typespec_to_ast({:type, line, :fun, [{:type, _, :product, args}, result]}) do
args = for arg <- args, do: typespec_to_ast(arg)
- [{ :->, [line: line], [args, typespec_to_ast(result)] }]
+ [{:->, [line: line], [args, typespec_to_ast(result)]}]
end
- defp typespec_to_ast({ :type, line, :fun, [args, result] }) do
- [{ :->, [line: line], [[typespec_to_ast(args)], typespec_to_ast(result)] }]
+ defp typespec_to_ast({:type, line, :fun, [args, result]}) do
+ [{:->, [line: line], [[typespec_to_ast(args)], typespec_to_ast(result)]}]
end
- defp typespec_to_ast({ :type, line, :fun, [] }) do
- typespec_to_ast({ :type, line, :fun, [{:type, line, :any}, {:type, line, :any, []} ] })
+ defp typespec_to_ast({:type, line, :fun, []}) do
+ typespec_to_ast({:type, line, :fun, [{:type, line, :any}, {:type, line, :any, []}]})
end
- defp typespec_to_ast({ :type, line, :range, [left, right] }) do
- { :"..", [line: line], [typespec_to_ast(left), typespec_to_ast(right)] }
+ defp typespec_to_ast({:type, line, :range, [left, right]}) do
+ {:"..", [line: line], [typespec_to_ast(left), typespec_to_ast(right)]}
end
- defp typespec_to_ast({ :type, line, name, args }) do
+ defp typespec_to_ast({:type, line, name, args}) do
args = for arg <- args, do: typespec_to_ast(arg)
- { name, [line: line], args }
+ {name, [line: line], args}
end
- defp typespec_to_ast({ :var, line, var }) do
- { erl_to_ex_var(var), line, nil }
+ defp typespec_to_ast({:var, line, var}) do
+ {erl_to_ex_var(var), line, nil}
end
- defp typespec_to_ast({ :op, line, op, arg }) do
- { op, [line: line], [typespec_to_ast(arg)] }
+ defp typespec_to_ast({:op, line, op, arg}) do
+ {op, [line: line], [typespec_to_ast(arg)]}
end
# Special shortcut(s)
- defp typespec_to_ast({ :remote_type, line, [{:atom, _, :elixir}, {:atom, _, :char_list}, []] }) do
+ defp typespec_to_ast({:remote_type, line, [{:atom, _, :elixir}, {:atom, _, :char_list}, []]}) do
typespec_to_ast({:type, line, :char_list, []})
end
- defp typespec_to_ast({ :remote_type, line, [{:atom, _, :elixir}, {:atom, _, :char_data}, []] }) do
+ defp typespec_to_ast({:remote_type, line, [{:atom, _, :elixir}, {:atom, _, :char_data}, []]}) do
typespec_to_ast({:type, line, :char_data, []})
end
- defp typespec_to_ast({ :remote_type, line, [{:atom, _, :elixir}, {:atom, _, :as_boolean}, [arg]] }) do
+ defp typespec_to_ast({:remote_type, line, [{:atom, _, :elixir}, {:atom, _, :as_boolean}, [arg]]}) do
typespec_to_ast({:type, line, :as_boolean, [arg]})
end
- defp typespec_to_ast({ :remote_type, line, [mod, name, args] }) do
+ defp typespec_to_ast({:remote_type, line, [mod, name, args]}) do
args = for arg <- args, do: typespec_to_ast(arg)
- dot = { :., [line: line], [typespec_to_ast(mod), typespec_to_ast(name)] }
- { dot, [line: line], args }
+ dot = {:., [line: line], [typespec_to_ast(mod), typespec_to_ast(name)]}
+ {dot, [line: line], args}
end
- defp typespec_to_ast({ :ann_type, line, [var, type] }) do
- { :::, [line: line], [typespec_to_ast(var), typespec_to_ast(type)] }
+ defp typespec_to_ast({:ann_type, line, [var, type]}) do
+ {:::, [line: line], [typespec_to_ast(var), typespec_to_ast(type)]}
end
- defp typespec_to_ast({ :typed_record_field,
- { :record_field, line, { :atom, line1, name } },
- type }) do
- typespec_to_ast({ :ann_type, line, [{ :var, line1, name }, type] })
+ defp typespec_to_ast({:typed_record_field,
+ {:record_field, line, {:atom, line1, name}},
+ type}) do
+ typespec_to_ast({:ann_type, line, [{:var, line1, name}, type]})
end
- defp typespec_to_ast({ :type, _, :any }) do
+ defp typespec_to_ast({:type, _, :any}) do
quote do: ...
end
- defp typespec_to_ast({ :paren_type, _, [type] }) do
+ defp typespec_to_ast({:paren_type, _, [type]}) do
typespec_to_ast(type)
end
- defp typespec_to_ast({ t, _line, atom }) when is_atom(t) do
+ defp typespec_to_ast({t, _line, atom}) when is_atom(t) do
atom
end
@@ -758,16 +758,16 @@ defmodule Kernel.Typespec do
defp line(meta) do
case :lists.keyfind(:line, 1, meta) do
- { :line, line } -> line
+ {:line, line} -> line
false -> 0
end
end
# Handle unions
- defp typespec({ :|, meta, [_, _] } = exprs, vars, caller) do
+ defp typespec({:|, meta, [_, _]} = exprs, vars, caller) do
exprs = collect_union(exprs)
union = for e <- exprs, do: typespec(e, vars, caller)
- { :type, line(meta), :union, union }
+ {:type, line(meta), :union, union}
end
# Handle binaries
@@ -785,7 +785,7 @@ defmodule Kernel.Typespec do
## Handle maps and structs
defp typespec({:%{}, meta, fields}, vars, caller) do
- fields = Enum.map(fields, fn { k, v } ->
+ fields = Enum.map(fields, fn {k, v} ->
{:type, line(meta), :map_field_assoc, typespec(k, vars, caller), typespec(v, vars, caller)}
end)
{:type, line(meta), :map, fields}
@@ -813,25 +813,25 @@ defmodule Kernel.Typespec do
# Handle funs
defp typespec([{:->, meta, [arguments, return]}], vars, caller) when is_list(arguments) do
args = fn_args(meta, arguments, return, vars, caller)
- { :type, line(meta), :fun, args }
+ {:type, line(meta), :fun, args}
end
# Handle type operator
- defp typespec({:::, meta, [var, expr] }, vars, caller) do
+ defp typespec({:::, meta, [var, expr]}, vars, caller) do
left = typespec(var, [elem(var, 0)|vars], caller)
right = typespec(expr, vars, caller)
- { :ann_type, line(meta), [left, right] }
+ {:ann_type, line(meta), [left, right]}
end
# Handle unary ops
defp typespec({op, meta, [integer]}, _, _) when op in [:+, :-] and is_integer(integer) do
- { :op, line(meta), op, {:integer, line(meta), integer} }
+ {:op, line(meta), op, {:integer, line(meta), integer}}
end
# Handle access macro
defp typespec({{:., meta, [Kernel, :access]}, meta1, [target, args]}, vars, caller) do
access = {{:., meta, [Kernel, :access]}, meta1,
- [target, args ++ [_: { :any, [], [] }]]}
+ [target, args ++ [_: {:any, [], []}]]}
typespec(Macro.expand(access, caller), vars, caller)
end
@@ -846,16 +846,16 @@ defmodule Kernel.Typespec do
# Handle tuples
defp typespec({:tuple, meta, args}, _vars, _caller) when args == [] or is_atom(args) do
- { :type, line(meta), :tuple, :any }
+ {:type, line(meta), :tuple, :any}
end
defp typespec({:{}, meta, t}, vars, caller) when is_list(t) do
args = for e <- t, do: typespec(e, vars, caller)
- { :type, line(meta), :tuple, args }
+ {:type, line(meta), :tuple, args}
end
- defp typespec({ left, right }, vars, caller) do
- typespec({ :{}, [], [left, right] }, vars, caller)
+ defp typespec({left, right}, vars, caller) do
+ typespec({:{}, [], [left, right]}, vars, caller)
end
# Handle blocks
@@ -866,7 +866,7 @@ defmodule Kernel.Typespec do
# Handle variables or local calls
defp typespec({name, meta, atom}, vars, caller) when is_atom(atom) do
if name in vars do
- { :var, line(meta), name }
+ {:var, line(meta), name}
else
typespec({name, meta, []}, vars, caller)
end
@@ -877,7 +877,7 @@ defmodule Kernel.Typespec do
:elixir_errors.warn caller.line, caller.file, "string() type use is discouraged. For character lists, use " <>
"char_list() type, for strings, String.t()\n#{Exception.format_stacktrace(caller.stacktrace)}"
arguments = for arg <- arguments, do: typespec(arg, vars, caller)
- { :type, line(meta), :string, arguments }
+ {:type, line(meta), :string, arguments}
end
defp typespec({:char_list, _meta, []}, vars, caller) do
@@ -894,36 +894,36 @@ defmodule Kernel.Typespec do
defp typespec({name, meta, arguments}, vars, caller) do
arguments = for arg <- arguments, do: typespec(arg, vars, caller)
- { :type, line(meta), name, arguments }
+ {:type, line(meta), name, arguments}
end
# Handle literals
defp typespec(atom, _, _) when is_atom(atom) do
- { :atom, 0, atom }
+ {:atom, 0, atom}
end
defp typespec(integer, _, _) when is_integer(integer) do
- { :integer, 0, integer }
+ {:integer, 0, integer}
end
defp typespec([], vars, caller) do
- typespec({ nil, [], [] }, vars, caller)
+ typespec({nil, [], []}, vars, caller)
end
defp typespec([spec], vars, caller) do
- typespec({ :list, [], [spec] }, vars, caller)
+ typespec({:list, [], [spec]}, vars, caller)
end
defp typespec([spec, {:"...", _, quoted}], vars, caller) when is_atom(quoted) do
- typespec({ :nonempty_list, [], [spec] }, vars, caller)
+ typespec({:nonempty_list, [], [spec]}, vars, caller)
end
defp typespec(list, vars, caller) do
[h|t] = Enum.reverse(list)
union = Enum.reduce(t, validate_kw(h, list, caller), fn(x, acc) ->
- { :|, [], [validate_kw(x, list, caller), acc] }
+ {:|, [], [validate_kw(x, list, caller), acc]}
end)
- typespec({ :list, [], [union] }, vars, caller)
+ typespec({:list, [], [union]}, vars, caller)
end
## Helpers
@@ -934,13 +934,13 @@ defmodule Kernel.Typespec do
defp remote_type({remote, meta, name, arguments}, vars, caller) do
arguments = for arg <- arguments, do: typespec(arg, vars, caller)
- { :remote_type, line(meta), [ remote, name, arguments ] }
+ {:remote_type, line(meta), [remote, name, arguments]}
end
- defp collect_union({ :|, _, [a, b] }), do: [a|collect_union(b)]
+ defp collect_union({:|, _, [a, b]}), do: [a|collect_union(b)]
defp collect_union(v), do: [v]
- defp validate_kw({ key, _ } = t, _, _caller) when is_atom(key), do: t
+ defp validate_kw({key, _} = t, _, _caller) when is_atom(key), do: t
defp validate_kw(_, original, caller) do
compile_error(caller, "unexpected list in typespec: #{Macro.to_string original}")
end
@@ -953,24 +953,24 @@ defmodule Kernel.Typespec do
end
defp fn_args(meta, [{:"...", _, _}], _vars, _caller) do
- { :type, line(meta), :any }
+ {:type, line(meta), :any}
end
defp fn_args(meta, args, vars, caller) do
args = for arg <- args, do: typespec(arg, vars, caller)
- { :type, line(meta), :product, args }
+ {:type, line(meta), :product, args}
end
defp variable({name, meta, _}) do
{:var, line(meta), name}
end
- defp unpack_typespec_kw([{ :type, _, :tuple, [{ :atom, _, atom }, type] }|t], acc) do
+ defp unpack_typespec_kw([{:type, _, :tuple, [{:atom, _, atom}, type]}|t], acc) do
unpack_typespec_kw(t, [{atom, typespec_to_ast(type)}|acc])
end
defp unpack_typespec_kw([], acc) do
- { :ok, :lists.reverse(acc) }
+ {:ok, :lists.reverse(acc)}
end
defp unpack_typespec_kw(_, _acc) do
diff --git a/lib/elixir/lib/keyword.ex b/lib/elixir/lib/keyword.ex
index 307c2aefe..705c5fc45 100644
--- a/lib/elixir/lib/keyword.ex
+++ b/lib/elixir/lib/keyword.ex
@@ -27,13 +27,13 @@ defmodule Keyword do
@type value :: any
@type t :: [{key, value}]
- @type t(value) :: [{ key, value }]
+ @type t(value) :: [{key, value}]
@doc """
Checks if the given argument is a keywords list or not.
"""
@spec keyword?(term) :: boolean
- def keyword?([{ key, _value } | rest]) when is_atom(key) do
+ def keyword?([{key, _value} | rest]) when is_atom(key) do
keyword?(rest)
end
@@ -63,7 +63,7 @@ defmodule Keyword do
"""
@spec new(Enum.t) :: t
def new(pairs) do
- Enum.reduce pairs, [], fn { k, v }, keywords ->
+ Enum.reduce pairs, [], fn {k, v}, keywords ->
put(keywords, k, v)
end
end
@@ -84,7 +84,7 @@ defmodule Keyword do
@spec new(Enum.t, ({key, value} -> {key, value})) :: t
def new(pairs, transform) do
Enum.reduce pairs, [], fn i, keywords ->
- { k, v } = transform.(i)
+ {k, v} = transform.(i)
put(keywords, k, v)
end
end
@@ -113,7 +113,7 @@ defmodule Keyword do
@spec get(t, key, value) :: value
def get(keywords, key, default \\ nil) when is_list(keywords) and is_atom(key) do
case :lists.keyfind(key, 1, keywords) do
- { ^key, value } -> value
+ {^key, value} -> value
false -> default
end
end
@@ -125,7 +125,7 @@ defmodule Keyword do
## Examples
iex> Keyword.fetch([a: 1], :a)
- { :ok, 1 }
+ {:ok, 1}
iex> Keyword.fetch([a: 1], :b)
:error
@@ -134,7 +134,7 @@ defmodule Keyword do
@spec fetch(t, key) :: value
def fetch(keywords, key) when is_list(keywords) and is_atom(key) do
case :lists.keyfind(key, 1, keywords) do
- { ^key, value } -> { :ok, value }
+ {^key, value} -> {:ok, value}
false -> :error
end
end
@@ -155,7 +155,7 @@ defmodule Keyword do
@spec fetch!(t, key) :: value | no_return
def fetch!(keywords, key) when is_list(keywords) and is_atom(key) do
case :lists.keyfind(key, 1, keywords) do
- { ^key, value } -> value
+ {^key, value} -> value
false -> raise(KeyError, key: key, term: keywords)
end
end
@@ -171,7 +171,7 @@ defmodule Keyword do
"""
@spec get_values(t, key) :: [value]
def get_values(keywords, key) when is_list(keywords) and is_atom(key) do
- for { k, v } <- keywords, key == k, do: v
+ for {k, v} <- keywords, key == k, do: v
end
@doc """
@@ -189,7 +189,7 @@ defmodule Keyword do
"""
@spec keys(t) :: [key]
def keys(keywords) when is_list(keywords) do
- for { key, _ } <- keywords, do: key
+ for {key, _} <- keywords, do: key
end
@doc """
@@ -203,7 +203,7 @@ defmodule Keyword do
"""
@spec values(t) :: [value]
def values(keywords) when is_list(keywords) do
- for { _, value } <- keywords, do: value
+ for {_, value} <- keywords, do: value
end
@doc """
@@ -224,7 +224,7 @@ defmodule Keyword do
"""
@spec delete(t, key, value) :: t
def delete(keywords, key, value) when is_list(keywords) and is_atom(key) do
- for { k, v } = tuple <- keywords, key != k or value != v, do: tuple
+ for {k, v} = tuple <- keywords, key != k or value != v, do: tuple
end
@doc """
@@ -247,7 +247,7 @@ defmodule Keyword do
"""
@spec delete(t, key) :: t
def delete(keywords, key) when is_list(keywords) and is_atom(key) do
- for { k, _ } = tuple <- keywords, key != k, do: tuple
+ for {k, _} = tuple <- keywords, key != k, do: tuple
end
@doc """
@@ -304,7 +304,7 @@ defmodule Keyword do
@spec put_new(t, key, value) :: t
def put_new(keywords, key, value) when is_list(keywords) and is_atom(key) do
case :lists.keyfind(key, 1, keywords) do
- { ^key, _ } -> keywords
+ {^key, _} -> keywords
false -> [{key, value}|keywords]
end
end
@@ -336,7 +336,7 @@ defmodule Keyword do
"""
@spec merge(t, t) :: t
def merge(d1, d2) when is_list(d1) and is_list(d2) do
- d2 ++ for({ k, _ } = tuple <- d1, not has_key?(d2, k), do: tuple)
+ d2 ++ for({k, _} = tuple <- d1, not has_key?(d2, k), do: tuple)
end
@doc """
@@ -356,7 +356,7 @@ defmodule Keyword do
do_merge(d2, d1, fun)
end
- defp do_merge([{ k, v2 }|t], acc, fun) do
+ defp do_merge([{k, v2}|t], acc, fun) do
do_merge t, update(acc, k, v2, fn(v1) -> fun.(k, v1, v2) end), fun
end
@@ -445,24 +445,24 @@ defmodule Keyword do
iex> d = [a: 1, b: 2, c: 3, d: 4]
iex> Keyword.split(d, [:a, :c, :e])
- { [a: 1, c: 3], [b: 2, d: 4] }
+ {[a: 1, c: 3], [b: 2, d: 4]}
iex> d = [a: 1, b: 2, c: 3, d: 4, a: 5]
iex> Keyword.split(d, [:a, :c, :e])
- { [a: 1, c: 3, a: 5], [b: 2, d: 4] }
+ {[a: 1, c: 3, a: 5], [b: 2, d: 4]}
"""
def split(keywords, keys) when is_list(keywords) do
- acc = { [], [] }
+ acc = {[], []}
- { take, drop } = Enum.reduce keywords, acc, fn({ k, v }, { take, drop }) ->
+ {take, drop} = Enum.reduce keywords, acc, fn({k, v}, {take, drop}) ->
case k in keys do
- true -> { [{k, v}|take], drop }
- false -> { take, [{k, v}|drop] }
+ true -> {[{k, v}|take], drop}
+ false -> {take, [{k, v}|drop]}
end
end
- { Enum.reverse(take), Enum.reverse(drop) }
+ {Enum.reverse(take), Enum.reverse(drop)}
end
@doc """
@@ -481,7 +481,7 @@ defmodule Keyword do
"""
def take(keywords, keys) when is_list(keywords) do
- for { k, _ } = tuple <- keywords, k in keys, do: tuple
+ for {k, _} = tuple <- keywords, k in keys, do: tuple
end
@doc """
@@ -500,7 +500,7 @@ defmodule Keyword do
"""
def drop(keywords, keys) when is_list(keywords) do
- for { k, _ } = tuple <- keywords, not k in keys, do: tuple
+ for {k, _} = tuple <- keywords, not k in keys, do: tuple
end
@doc """
@@ -529,7 +529,7 @@ defmodule Keyword do
"""
def pop(keywords, key, default \\ nil) when is_list(keywords) do
- { get(keywords, key, default), delete(keywords, key) }
+ {get(keywords, key, default), delete(keywords, key)}
end
@doc """
@@ -558,6 +558,6 @@ defmodule Keyword do
"""
def pop_first(keywords, key, default \\ nil) when is_list(keywords) do
- { get(keywords, key, default), delete_first(keywords, key) }
+ {get(keywords, key, default), delete_first(keywords, key)}
end
end
diff --git a/lib/elixir/lib/list.ex b/lib/elixir/lib/list.ex
index 098ca1140..11db8748e 100644
--- a/lib/elixir/lib/list.ex
+++ b/lib/elixir/lib/list.ex
@@ -164,10 +164,10 @@ defmodule List do
## Examples
iex> List.keyfind([a: 1, b: 2], :a, 0)
- { :a, 1 }
+ {:a, 1}
iex> List.keyfind([a: 1, b: 2], 2, 1)
- { :b, 2 }
+ {:b, 2}
iex> List.keyfind([a: 1, b: 2], :c, 0)
nil
@@ -206,7 +206,7 @@ defmodule List do
## Examples
- iex> List.keyreplace([a: 1, b: 2], :a, 0, { :a, 3 })
+ iex> List.keyreplace([a: 1, b: 2], :a, 0, {:a, 3})
[a: 3, b: 2]
"""
@@ -240,10 +240,10 @@ defmodule List do
## Examples
- iex> List.keystore([a: 1, b: 2], :a, 0, { :a, 3 })
+ iex> List.keystore([a: 1, b: 2], :a, 0, {:a, 3})
[a: 3, b: 2]
- iex> List.keystore([a: 1, b: 2], :c, 0, { :c, 3 })
+ iex> List.keystore([a: 1, b: 2], :c, 0, {:c, 3})
[a: 1, b: 2, c: 3]
"""
@@ -463,21 +463,21 @@ defmodule List do
## Examples
iex> List.from_char_data("æß")
- { :ok, 'æß' }
+ {:ok, 'æß'}
iex> List.from_char_data([?a, "bc"])
- { :ok, 'abc' }
+ {:ok, 'abc'}
"""
- @spec from_char_data(char_data) :: { :ok, char_list } | { :error, list, binary } | { :incomplete, list, binary }
+ @spec from_char_data(char_data) :: {:ok, char_list} | {:error, list, binary} | {:incomplete, list, binary}
def from_char_data(char_data) do
case :unicode.characters_to_list(char_data) do
result when is_list(result) ->
- { :ok, result }
+ {:ok, result}
- { :error, _, _ } = error ->
+ {:error, _, _} = error ->
error
- { :incomplete, _, _ } = incomplete ->
+ {:incomplete, _, _} = incomplete ->
incomplete
end
end
@@ -506,10 +506,10 @@ defmodule List do
result when is_list(result) ->
result
- { :error, encoded, rest } ->
+ {:error, encoded, rest} ->
raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :invalid
- { :incomplete, encoded, rest } ->
+ {:incomplete, encoded, rest} ->
raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :incomplete
end
end
@@ -597,15 +597,15 @@ defmodule List do
end
defp do_zip_each(_, nil) do
- { nil, nil }
+ {nil, nil}
end
defp do_zip_each([h|t], acc) do
- { t, [h|acc] }
+ {t, [h|acc]}
end
defp do_zip_each([], _) do
- { nil, nil }
+ {nil, nil}
end
defp to_list(tuple) when is_tuple(tuple), do: tuple_to_list(tuple)
diff --git a/lib/elixir/lib/list_dict.ex b/lib/elixir/lib/list_dict.ex
index 4073157ec..71e14a856 100644
--- a/lib/elixir/lib/list_dict.ex
+++ b/lib/elixir/lib/list_dict.ex
@@ -36,11 +36,11 @@ defmodule ListDict do
end
def keys(dict) do
- for { key, _ } <- dict, do: key
+ for {key, _} <- dict, do: key
end
def values(dict) do
- for { _, value } <- dict, do: value
+ for {_, value} <- dict, do: value
end
def size(dict) do
@@ -52,37 +52,37 @@ defmodule ListDict do
do_has_key?(dict, key)
end
- defp do_has_key?([{ key, _ }|_], key), do: true
- defp do_has_key?([{ _, _ }|t], key), do: do_has_key?(t, key)
+ defp do_has_key?([{key, _}|_], key), do: true
+ defp do_has_key?([{_, _}|t], key), do: do_has_key?(t, key)
defp do_has_key?([], _key), do: false
def get(dict, key, default \\ nil) do
deprecated(key)
do_get(dict, key, default)
end
- defp do_get([{ key, value }|_], key, _default), do: value
- defp do_get([{ _, _ }|t], key, default), do: do_get(t, key, default)
+ defp do_get([{key, value}|_], key, _default), do: value
+ defp do_get([{_, _}|t], key, default), do: do_get(t, key, default)
defp do_get([], _key, default), do: default
def fetch(dict, key) do
deprecated(key)
do_fetch(dict, key)
end
- defp do_fetch([{ key, value }|_], key), do: { :ok, value }
- defp do_fetch([{ _, _ }|t], key), do: do_fetch(t, key)
+ defp do_fetch([{key, value}|_], key), do: {:ok, value}
+ defp do_fetch([{_, _}|t], key), do: do_fetch(t, key)
defp do_fetch([], _key), do: :error
def fetch!(dict, key) do
deprecated(key)
case fetch(dict, key) do
- { :ok, value } -> value
+ {:ok, value} -> value
:error -> raise(KeyError, key: key, term: dict)
end
end
def pop(dict, key, default \\ nil) do
deprecated(key)
- { do_get(dict, key, default), do_delete(dict, key) }
+ {do_get(dict, key, default), do_delete(dict, key)}
end
def put(dict, key, val) do
@@ -103,24 +103,24 @@ defmodule ListDict do
do_delete(dict, key)
end
- defp do_delete([{ key, _ }|t], key), do: t
- defp do_delete([{ _, _ } = h|t], key), do: [h|do_delete(t, key)]
+ defp do_delete([{key, _}|t], key), do: t
+ defp do_delete([{_, _} = h|t], key), do: [h|do_delete(t, key)]
defp do_delete([], _key), do: []
def merge(dict, enum, callback \\ fn(_k, _v1, v2) -> v2 end) do
- Enum.reduce enum, dict, fn { k, v2 }, acc ->
+ Enum.reduce enum, dict, fn {k, v2}, acc ->
update(acc, k, v2, fn(v1) -> callback.(k, v1, v2) end)
end
end
def split(dict, keys) do
- acc = { [], [] }
+ acc = {[], []}
- {take, drop} = Enum.reduce dict, acc, fn({ k, v }, { take, drop }) ->
+ {take, drop} = Enum.reduce dict, acc, fn({k, v}, {take, drop}) ->
if k in keys do
- { [{k, v}|take], drop }
+ {[{k, v}|take], drop}
else
- { take, [{k, v}|drop] }
+ {take, [{k, v}|drop]}
end
end
@@ -128,11 +128,11 @@ defmodule ListDict do
end
def take(dict, keys) do
- for { k, _ } = tuple <- dict, k in keys, do: tuple
+ for {k, _} = tuple <- dict, k in keys, do: tuple
end
def drop(dict, keys) do
- for { k, _ } = tuple <- dict, not k in keys, do: tuple
+ for {k, _} = tuple <- dict, not k in keys, do: tuple
end
def update!(list, key, fun) do
diff --git a/lib/elixir/lib/macro.ex b/lib/elixir/lib/macro.ex
index b2fd25f19..6f0a35961 100644
--- a/lib/elixir/lib/macro.ex
+++ b/lib/elixir/lib/macro.ex
@@ -6,10 +6,10 @@ defmodule Macro do
"""
@typedoc "Abstract Syntax Tree (AST)"
- @type t :: expr | { t, t } | atom | number | binary | pid | fun | [t]
+ @type t :: expr | {t, t} | atom | number | binary | pid | fun | [t]
@typedoc "Expr node (remaining ones are literals)"
- @type expr :: { expr | atom, Keyword.t, atom | [t] }
+ @type expr :: {expr | atom, Keyword.t, atom | [t]}
@binary_ops [:===, :!==,
:==, :!=, :<=, :>=,
@@ -27,7 +27,7 @@ defmodule Macro do
@doc false
defmacro unary_ops, do: @unary_ops
- @spec binary_op_props(atom) :: { :left | :right, precedence :: integer }
+ @spec binary_op_props(atom) :: {:left | :right, precedence :: integer}
defp binary_op_props(o) do
case o do
o when o in [:<-, :inlist, :inbits, :\\, :::] -> {:left, 40}
@@ -53,12 +53,12 @@ defmodule Macro do
Raises if the pipeline is ill-formed.
"""
@spec unpipe(Macro.t) :: [Macro.t]
- def unpipe({ :|> , _, [left, right] }) do
- [{ left, 0 }|unpipe(right)]
+ def unpipe({:|>, _, [left, right]}) do
+ [{left, 0}|unpipe(right)]
end
def unpipe(other) do
- [{ other, 0 }]
+ [{other, 0}]
end
@doc """
@@ -67,16 +67,16 @@ defmodule Macro do
@spec pipe(Macro.t, Macro.t, integer) :: Macro.t | no_return
def pipe(expr, call_args, position)
- def pipe(expr, { :&, _, _ } = call_args, _integer) do
+ def pipe(expr, {:&, _, _} = call_args, _integer) do
raise ArgumentError, message: "cannot pipe #{to_string expr} into #{to_string call_args}"
end
- def pipe(expr, { call, line, atom }, integer) when is_atom(atom) do
- { call, line, List.insert_at([], integer, expr) }
+ def pipe(expr, {call, line, atom}, integer) when is_atom(atom) do
+ {call, line, List.insert_at([], integer, expr)}
end
- def pipe(expr, { call, line, args }, integer) when is_list(args) do
- { call, line, List.insert_at(args, integer, expr) }
+ def pipe(expr, {call, line, args}, integer) when is_list(args) do
+ {call, line, List.insert_at(args, integer, expr)}
end
def pipe(expr, call_args, _integer) do
@@ -103,12 +103,12 @@ defmodule Macro do
@spec update_meta(t, (Keyword.t -> Keyword.t)) :: t
def update_meta(quoted, fun)
- def update_meta({ left, meta, right }, fun) when is_list(meta) do
- { update_meta(left, fun), fun.(meta), update_meta(right, fun) }
+ def update_meta({left, meta, right}, fun) when is_list(meta) do
+ {update_meta(left, fun), fun.(meta), update_meta(right, fun)}
end
- def update_meta({ left, right }, fun) do
- { update_meta(left, fun), update_meta(right, fun) }
+ def update_meta({left, right}, fun) do
+ {update_meta(left, fun), update_meta(right, fun)}
end
def update_meta(list, fun) when is_list(list) do
@@ -128,30 +128,30 @@ defmodule Macro do
## Examples
iex> Macro.decompose_call(quote do: foo)
- { :foo, [] }
+ {:foo, []}
iex> Macro.decompose_call(quote do: foo())
- { :foo, [] }
+ {:foo, []}
iex> Macro.decompose_call(quote do: foo(1, 2, 3))
- { :foo, [1, 2, 3] }
+ {:foo, [1, 2, 3]}
iex> Macro.decompose_call(quote do: Elixir.M.foo(1, 2, 3))
- { { :__aliases__, [], [:Elixir, :M] }, :foo, [1, 2, 3] }
+ {{:__aliases__, [], [:Elixir, :M]}, :foo, [1, 2, 3]}
iex> Macro.decompose_call(quote do: 42)
:error
"""
- @spec decompose_call(Macro.t) :: { atom, [Macro.t] } | { Macro.t, atom, [Macro.t] } | :error
- def decompose_call({ { :., _, [remote, function] }, _, args }) when is_tuple(remote) or is_atom(remote),
- do: { remote, function, args }
+ @spec decompose_call(Macro.t) :: {atom, [Macro.t]} | {Macro.t, atom, [Macro.t]} | :error
+ def decompose_call({{:., _, [remote, function]}, _, args}) when is_tuple(remote) or is_atom(remote),
+ do: {remote, function, args}
- def decompose_call({ name, _, args }) when is_atom(name) and is_atom(args),
- do: { name, [] }
+ def decompose_call({name, _, args}) when is_atom(name) and is_atom(args),
+ do: {name, []}
- def decompose_call({ name, _, args }) when is_atom(name) and is_list(args),
- do: { name, args }
+ def decompose_call({name, _, args}) when is_atom(name) and is_list(args),
+ do: {name, args}
def decompose_call(_),
do: :error
@@ -169,10 +169,10 @@ defmodule Macro do
iex> Macro.escape(:foo)
:foo
- iex> Macro.escape({ :a, :b, :c })
- { :{}, [], [:a, :b, :c] }
+ iex> Macro.escape({:a, :b, :c})
+ {:{}, [], [:a, :b, :c]}
- iex> Macro.escape({ :unquote, [], [1] }, unquote: true)
+ iex> Macro.escape({:unquote, [], [1]}, unquote: true)
1
"""
@@ -299,27 +299,27 @@ defmodule Macro do
def to_string(tree, fun \\ fn(_ast, string) -> string end)
# Variables
- def to_string({ var, _, atom } = ast, fun) when is_atom(atom) do
+ def to_string({var, _, atom} = ast, fun) when is_atom(atom) do
fun.(ast, atom_to_binary(var))
end
# Aliases
- def to_string({ :__aliases__, _, refs } = ast, fun) do
+ def to_string({:__aliases__, _, refs} = ast, fun) do
fun.(ast, Enum.map_join(refs, ".", &call_to_string(&1, fun)))
end
# Blocks
- def to_string({ :__block__, _, [expr] } = ast, fun) do
+ def to_string({:__block__, _, [expr]} = ast, fun) do
fun.(ast, to_string(expr, fun))
end
- def to_string({ :__block__, _, _ } = ast, fun) do
+ def to_string({:__block__, _, _} = ast, fun) do
block = adjust_new_lines block_to_string(ast, fun), "\n "
fun.(ast, "(\n " <> block <> "\n)")
end
# Bits containers
- def to_string({ :<<>>, _, args } = ast, fun) do
+ def to_string({:<<>>, _, args} = ast, fun) do
fun.(ast, case Enum.map_join(args, ", ", &to_string(&1, fun)) do
"<" <> rest -> "<< <" <> rest <> " >>"
rest -> "<<" <> rest <> ">>"
@@ -327,45 +327,45 @@ defmodule Macro do
end
# Tuple containers
- def to_string({ :{}, _, args } = ast, fun) do
+ def to_string({:{}, _, args} = ast, fun) do
tuple = "{" <> Enum.map_join(args, ", ", &to_string(&1, fun)) <> "}"
fun.(ast, tuple)
end
# Map containers
- def to_string({ :%{}, _, args } = ast, fun) do
+ def to_string({:%{}, _, args} = ast, fun) do
map = "%{" <> map_to_string(args, fun) <> "}"
fun.(ast, map)
end
- def to_string({ :%, _, [structname, map] } = ast, fun) do
- { :%{}, _, args } = map
+ def to_string({:%, _, [structname, map]} = ast, fun) do
+ {:%{}, _, args} = map
struct = "%" <> to_string(structname, fun) <> "{" <> map_to_string(args, fun) <> "}"
fun.(ast, struct)
end
# Fn keyword
- def to_string({ :fn, _, [{ :->, _, [_, tuple] }] = arrow } = ast, fun)
+ def to_string({:fn, _, [{:->, _, [_, tuple]}] = arrow} = ast, fun)
when not is_tuple(tuple) or elem(tuple, 0) != :__block__ do
fun.(ast, "fn " <> arrow_to_string(arrow, fun) <> " end")
end
- def to_string({ :fn, _, [{ :->, _, _ }] = block } = ast, fun) do
+ def to_string({:fn, _, [{:->, _, _}] = block} = ast, fun) do
fun.(ast, "fn " <> block_to_string(block, fun) <> "\nend")
end
- def to_string({ :fn, _, block } = ast, fun) do
+ def to_string({:fn, _, block} = ast, fun) do
block = adjust_new_lines block_to_string(block, fun), "\n "
fun.(ast, "fn\n " <> block <> "\nend")
end
# left -> right
- def to_string([{ :->, _, _ }|_] = ast, fun) do
+ def to_string([{:->, _, _}|_] = ast, fun) do
fun.(ast, "(" <> arrow_to_string(ast, fun, true) <> ")")
end
# left when right
- def to_string({ :when, _, [left, right] } = ast, fun) do
+ def to_string({:when, _, [left, right]} = ast, fun) do
if right != [] and Keyword.keyword?(right) do
right = kw_list_to_string(right, fun)
else
@@ -376,38 +376,38 @@ defmodule Macro do
end
# Binary ops
- def to_string({ op, _, [left, right] } = ast, fun) when op in unquote(@binary_ops) do
+ def to_string({op, _, [left, right]} = ast, fun) when op in unquote(@binary_ops) do
fun.(ast, op_to_string(left, fun, op, :left) <> " #{op} " <> op_to_string(right, fun, op, :right))
end
# Splat when
- def to_string({ :when, _, args } = ast, fun) do
- { left, right } = :elixir_utils.split_last(args)
+ def to_string({:when, _, args} = ast, fun) do
+ {left, right} = :elixir_utils.split_last(args)
fun.(ast, "(" <> Enum.map_join(left, ", ", &to_string(&1, fun)) <> ") when " <> to_string(right, fun))
end
# Unary ops
- def to_string({ unary, _, [{ binary, _, [_, _] } = arg] } = ast, fun)
+ def to_string({unary, _, [{binary, _, [_, _]} = arg]} = ast, fun)
when unary in unquote(@unary_ops) and binary in unquote(@binary_ops) do
fun.(ast, atom_to_binary(unary) <> "(" <> to_string(arg, fun) <> ")")
end
- def to_string({ :not, _, [arg] } = ast, fun) do
+ def to_string({:not, _, [arg]} = ast, fun) do
fun.(ast, "not " <> to_string(arg, fun))
end
- def to_string({ op, _, [arg] } = ast, fun) when op in unquote(@unary_ops) do
+ def to_string({op, _, [arg]} = ast, fun) when op in unquote(@unary_ops) do
fun.(ast, atom_to_binary(op) <> to_string(arg, fun))
end
# Access
- def to_string({ { :., _, [Kernel, :access] }, _, [left, right] } = ast, fun) do
+ def to_string({{:., _, [Kernel, :access]}, _, [left, right]} = ast, fun) do
fun.(ast, to_string(left, fun) <> to_string(right, fun))
end
# All other calls
- def to_string({ target, _, args } = ast, fun) when is_list(args) do
- { list, last } = :elixir_utils.split_last(args)
+ def to_string({target, _, args} = ast, fun) when is_list(args) do
+ {list, last} = :elixir_utils.split_last(args)
fun.(ast, case kw_blocks?(last) do
true -> call_to_string_with_args(target, list, fun) <> kw_blocks_to_string(last, fun)
false -> call_to_string_with_args(target, args, fun)
@@ -415,8 +415,8 @@ defmodule Macro do
end
# Two-item tuples
- def to_string({ left, right }, fun) do
- to_string({ :{}, [], [left, right] }, fun)
+ def to_string({left, right}, fun) do
+ to_string({:{}, [], [left, right]}, fun)
end
# Lists
@@ -448,8 +448,8 @@ defmodule Macro do
defp module_to_string(other, fun), do: call_to_string(other, fun)
defp call_to_string(atom, _fun) when is_atom(atom), do: atom_to_binary(atom)
- defp call_to_string({ :., _, [arg] }, fun), do: module_to_string(arg, fun) <> "."
- defp call_to_string({ :., _, [left, right] }, fun), do: module_to_string(left, fun) <> "." <> call_to_string(right, fun)
+ defp call_to_string({:., _, [arg]}, fun), do: module_to_string(arg, fun) <> "."
+ defp call_to_string({:., _, [left, right]}, fun), do: module_to_string(left, fun) <> "." <> call_to_string(right, fun)
defp call_to_string(other, fun), do: to_string(other, fun)
defp call_to_string_with_args(target, args, fun) do
@@ -459,7 +459,7 @@ defmodule Macro do
end
defp args_to_string(args, fun) do
- { list, last } = :elixir_utils.split_last(args)
+ {list, last} = :elixir_utils.split_last(args)
if last != [] and Keyword.keyword?(last) do
args = Enum.map_join(list, ", ", &to_string(&1, fun))
@@ -484,14 +484,14 @@ defmodule Macro do
atom_to_binary(key) <> "\n " <> block <> "\n"
end
- defp block_to_string([{ :->, _, _ }|_] = block, fun) do
- Enum.map_join(block, "\n", fn({ :->, _, [left, right] }) ->
+ defp block_to_string([{:->, _, _}|_] = block, fun) do
+ Enum.map_join(block, "\n", fn({:->, _, [left, right]}) ->
left = comma_join_or_empty_paren(left, fun, false)
left <> "->\n " <> adjust_new_lines block_to_string(right, fun), "\n "
end)
end
- defp block_to_string({ :__block__, _, exprs }, fun) do
+ defp block_to_string({:__block__, _, exprs}, fun) do
Enum.map_join(exprs, "\n", &to_string(&1, fun))
end
@@ -509,13 +509,13 @@ defmodule Macro do
end
defp kw_list_to_string(list, fun) do
- Enum.map_join(list, ", ", fn { key, value } ->
+ Enum.map_join(list, ", ", fn {key, value} ->
atom_to_binary(key) <> ": " <> to_string(value, fun)
end)
end
defp map_list_to_string(list, fun) do
- Enum.map_join(list, ", ", fn { key, value } ->
+ Enum.map_join(list, ", ", fn {key, value} ->
to_string(key, fun) <> " => " <> to_string(value, fun)
end)
end
@@ -524,9 +524,9 @@ defmodule Macro do
"(" <> to_string(expr, fun) <> ")"
end
- defp op_to_string({ op, _, [_, _] } = expr, fun, parent_op, side) when op in unquote(@binary_ops) do
- { parent_assoc, parent_prec } = binary_op_props(parent_op)
- { _, prec } = binary_op_props(op)
+ defp op_to_string({op, _, [_, _]} = expr, fun, parent_op, side) when op in unquote(@binary_ops) do
+ {parent_assoc, parent_prec} = binary_op_props(parent_op)
+ {_, prec} = binary_op_props(op)
cond do
parent_prec < prec -> to_string(expr, fun)
parent_prec > prec -> parenthise(expr, fun)
@@ -543,7 +543,7 @@ defmodule Macro do
defp op_to_string(expr, fun, _, _), do: to_string(expr, fun)
defp arrow_to_string(pairs, fun, paren \\ false) do
- Enum.map_join(pairs, "; ", fn({ :->, _, [left, right] }) ->
+ Enum.map_join(pairs, "; ", fn({:->, _, [left, right]}) ->
left = comma_join_or_empty_paren(left, fun, paren)
left <> "-> " <> to_string(right, fun)
end)
@@ -609,7 +609,7 @@ defmodule Macro do
The compilation will fail because `My.Module` when quoted
is not an atom, but a syntax tree as follow:
- {:__aliases__, [], [:My, :Module] }
+ {:__aliases__, [], [:My, :Module]}
That said, we need to expand the aliases node above to an
atom, so we can retrieve its length. Expanding the node is
@@ -645,11 +645,11 @@ defmodule Macro do
elem(do_expand_once(ast, env), 0)
end
- defp do_expand_once({ :__aliases__, _, _ } = original, env) do
+ defp do_expand_once({:__aliases__, _, _} = original, env) do
case :elixir_aliases.expand(original, env.aliases, env.macro_aliases, env.lexical_tracker) do
receiver when is_atom(receiver) ->
:elixir_lexical.record_remote(receiver, env.lexical_tracker)
- { receiver, true }
+ {receiver, true}
aliases ->
aliases = for alias <- aliases, do: elem(do_expand_once(alias, env), 0)
@@ -657,100 +657,100 @@ defmodule Macro do
true ->
receiver = :elixir_aliases.concat(aliases)
:elixir_lexical.record_remote(receiver, env.lexical_tracker)
- { receiver, true }
+ {receiver, true}
false ->
- { original, false }
+ {original, false}
end
end
end
# Expand @ calls
- defp do_expand_once({ :@, _, [{ name, _, args }] } = original, env) when is_atom(args) or args == [] do
+ defp do_expand_once({:@, _, [{name, _, args}]} = original, env) when is_atom(args) or args == [] do
case (module = env.module) && Module.open?(module) do
- true -> { Module.get_attribute(module, name), true }
- false -> { original, false }
+ true -> {Module.get_attribute(module, name), true}
+ false -> {original, false}
end
end
# Expand pseudo-variables
- defp do_expand_once({ :__MODULE__, _, atom }, env) when is_atom(atom),
- do: { env.module, true }
- defp do_expand_once({ :__DIR__, _, atom }, env) when is_atom(atom),
- do: { :filename.dirname(env.file), true }
- defp do_expand_once({ :__ENV__, _, atom }, env) when is_atom(atom),
- do: { { :{}, [], tuple_to_list(env) }, true }
- defp do_expand_once({ { :., _, [{ :__ENV__, _, atom }, field] }, _, [] } = original, env) when
+ defp do_expand_once({:__MODULE__, _, atom}, env) when is_atom(atom),
+ do: {env.module, true}
+ defp do_expand_once({:__DIR__, _, atom}, env) when is_atom(atom),
+ do: {:filename.dirname(env.file), true}
+ defp do_expand_once({:__ENV__, _, atom}, env) when is_atom(atom),
+ do: {{:{}, [], tuple_to_list(env)}, true}
+ defp do_expand_once({{:., _, [{:__ENV__, _, atom}, field]}, _, []} = original, env) when
is_atom(atom) and is_atom(field) do
case :erlang.function_exported(Macro.Env, field, 1) do
- true -> { apply(env, field, []), true }
- false -> { original, false }
+ true -> {apply(env, field, []), true}
+ false -> {original, false}
end
end
# Expand possible macro import invocation
- defp do_expand_once({ atom, meta, context } = original, env)
+ defp do_expand_once({atom, meta, context} = original, env)
when is_atom(atom) and is_list(meta) and is_atom(context) do
- if :lists.member({ atom, Keyword.get(meta, :counter, context) }, env.vars) do
- { original, false }
+ if :lists.member({atom, Keyword.get(meta, :counter, context)}, env.vars) do
+ {original, false}
else
- case do_expand_once({ atom, meta, [] }, env) do
- { _, true } = exp -> exp
- { _, false } -> { original, false }
+ case do_expand_once({atom, meta, []}, env) do
+ {_, true} = exp -> exp
+ {_, false} -> {original, false}
end
end
end
- defp do_expand_once({ atom, meta, args } = original, env)
+ defp do_expand_once({atom, meta, args} = original, env)
when is_atom(atom) and is_list(args) and is_list(meta) do
arity = length(args)
if :elixir_import.special_form(atom, arity) do
- { original, false }
+ {original, false}
else
module = env.module
extra = if function_exported?(module, :__info__, 1) do
- [{ module, module.__info__(:macros) }]
+ [{module, module.__info__(:macros)}]
else
[]
end
- expand = :elixir_dispatch.expand_import(meta, { atom, length(args) }, args,
+ expand = :elixir_dispatch.expand_import(meta, {atom, length(args)}, args,
:elixir_env.ex_to_env(env), extra)
case expand do
- { :ok, receiver, quoted } ->
+ {:ok, receiver, quoted} ->
next = :elixir_counter.next
- { :elixir_quote.linify_with_context_counter(0, { receiver, next }, quoted), true }
- { :ok, _receiver } ->
- { original, false }
+ {:elixir_quote.linify_with_context_counter(0, {receiver, next}, quoted), true}
+ {:ok, _receiver} ->
+ {original, false}
:error ->
- { original, false }
+ {original, false}
end
end
end
# Expand possible macro require invocation
- defp do_expand_once({ { :., _, [left, right] }, meta, args } = original, env) when is_atom(right) do
- { receiver, _ } = do_expand_once(left, env)
+ defp do_expand_once({{:., _, [left, right]}, meta, args} = original, env) when is_atom(right) do
+ {receiver, _} = do_expand_once(left, env)
case is_atom(receiver) do
- false -> { original, false }
+ false -> {original, false}
true ->
- expand = :elixir_dispatch.expand_require(meta, receiver, { right, length(args) },
+ expand = :elixir_dispatch.expand_require(meta, receiver, {right, length(args)},
args, :elixir_env.ex_to_env(env))
case expand do
- { :ok, receiver, quoted } ->
+ {:ok, receiver, quoted} ->
next = :elixir_counter.next
- { :elixir_quote.linify_with_context_counter(0, { receiver, next }, quoted), true }
+ {:elixir_quote.linify_with_context_counter(0, {receiver, next}, quoted), true}
:error ->
- { original, false }
+ {original, false}
end
end
end
# Anything else is just returned
- defp do_expand_once(other, _env), do: { other, false }
+ defp do_expand_once(other, _env), do: {other, false}
@doc """
Receives an AST node and expands it until it can no longer
@@ -760,14 +760,14 @@ defmodule Macro do
`expand_once/2` for more information and exmaples.
"""
def expand(tree, env) do
- expand_until({ tree, true }, env)
+ expand_until({tree, true}, env)
end
- defp expand_until({ tree, true }, env) do
+ defp expand_until({tree, true}, env) do
expand_until(do_expand_once(tree, env), env)
end
- defp expand_until({ tree, false }, _env) do
+ defp expand_until({tree, false}, _env) do
tree
end
@@ -777,22 +777,22 @@ defmodule Macro do
Terms are considered safe if they represent data structures and don't actually
evaluate code. Returns `:ok` unless a given term is unsafe,
- which is returned as `{ :unsafe, term }`.
+ which is returned as `{:unsafe, term}`.
"""
def safe_term(terms) do
do_safe_term(terms) || :ok
end
- defp do_safe_term({ local, _, terms }) when local in [:{}, :%{}, :__aliases__] do
+ defp do_safe_term({local, _, terms}) when local in [:{}, :%{}, :__aliases__] do
do_safe_term(terms)
end
- defp do_safe_term({ unary, _, [term] }) when unary in [:+, :-] do
+ defp do_safe_term({unary, _, [term]}) when unary in [:+, :-] do
do_safe_term(term)
end
- defp do_safe_term({ left, right }), do: do_safe_term(left) || do_safe_term(right)
+ defp do_safe_term({left, right}), do: do_safe_term(left) || do_safe_term(right)
defp do_safe_term(terms) when is_list(terms), do: Enum.find_value(terms, &do_safe_term(&1))
- defp do_safe_term(terms) when is_tuple(terms), do: { :unsafe, terms }
+ defp do_safe_term(terms) when is_tuple(terms), do: {:unsafe, terms}
defp do_safe_term(_), do: nil
end
diff --git a/lib/elixir/lib/macro/env.ex b/lib/elixir/lib/macro/env.ex
index 8a06ea809..61da9c171 100644
--- a/lib/elixir/lib/macro/env.ex
+++ b/lib/elixir/lib/macro/env.ex
@@ -9,7 +9,7 @@ defmodule Macro.Env do
* `module` - the current module name.
* `file` - the current file name as a binary
* `line` - the current line as an integer
- * `function` - a tuple as `{ atom, integer` }, where the first
+   * `function` - a tuple as `{atom, integer}`, where the first
element is the function name and the seconds its arity. Returns
`nil` if not inside a function
* `context` - the context of the environment. It can be nil
@@ -21,23 +21,23 @@ defmodule Macro.Env do
* `macros` - a list of macros imported from each module
* `macro_aliases` - a list of aliases defined inside the current macro
* `context_modules` - a list of modules defined in the current context
- * `vars` - a list keeping all defined variables as { var, context }
+ * `vars` - a list keeping all defined variables as {var, context}
* `export_vars` - a list keeping all variables to be exported in a construct (may be nil)
* `lexical_tracker` - PID to the lexical tracker which is responsible to keep user info
* `local` - the module to expand local functions to
"""
- @type name_arity :: { atom, non_neg_integer }
+ @type name_arity :: {atom, non_neg_integer}
@type file :: binary
@type line :: non_neg_integer
- @type aliases :: [{ module, module }]
- @type macro_aliases :: [{ module, { integer, module } }]
+ @type aliases :: [{module, module}]
+ @type macro_aliases :: [{module, {integer, module}}]
@type context :: :match | :guard | nil
@type requires :: [module]
- @type functions :: [{ module, [name_arity] }]
- @type macros :: [{ module, [name_arity] }]
+ @type functions :: [{module, [name_arity]}]
+ @type macros :: [{module, [name_arity]}]
@type context_modules :: [module]
- @type vars :: [{ atom, atom | non_neg_integer }]
+ @type vars :: [{atom, atom | non_neg_integer}]
@type export_vars :: vars | nil
@type lexical_tracker :: pid
@type local :: module | nil
@@ -81,12 +81,12 @@ defmodule Macro.Env do
def stacktrace(record) do
cond do
nil?(record.module) ->
- [{ :elixir_compiler, :__FILE__, 1, relative_location(record) }]
+ [{:elixir_compiler, :__FILE__, 1, relative_location(record)}]
nil?(record.function) ->
- [{ module(record), :__MODULE__, 0, relative_location(record) }]
+ [{module(record), :__MODULE__, 0, relative_location(record)}]
true ->
- { name, arity } = record.function
- [{ module(record), name, arity, relative_location(record) }]
+ {name, arity} = record.function
+ [{module(record), name, arity, relative_location(record)}]
end
end
diff --git a/lib/elixir/lib/map.ex b/lib/elixir/lib/map.ex
index 9f06025dc..3b7cca0db 100644
--- a/lib/elixir/lib/map.ex
+++ b/lib/elixir/lib/map.ex
@@ -37,7 +37,7 @@ defmodule Map do
def fetch(map, key), do: :maps.find(key, map)
def pop(map, key, default \\ nil) do
- { get(map, key, default), delete(map, key) }
+ {get(map, key, default), delete(map, key)}
end
def put(map, key, val) do
@@ -60,13 +60,13 @@ defmodule Map do
end
def split(map, keys) do
- acc = { %{}, %{} }
+ acc = {%{}, %{}}
- :maps.fold fn k, v, { take, drop } ->
+ :maps.fold fn k, v, {take, drop} ->
if k in keys do
- { put(take, k, v), drop }
+ {put(take, k, v), drop}
else
- { take, put(drop, k, v) }
+ {take, put(drop, k, v)}
end
end, acc, map
end
@@ -75,7 +75,7 @@ defmodule Map do
case :maps.find(key, map) do
:error ->
raise(KeyError, key: key, term: map)
- { :ok, val } ->
+ {:ok, val} ->
:maps.put(key, fun.(val), map)
end
end
@@ -84,7 +84,7 @@ defmodule Map do
case :maps.find(key, map) do
:error ->
:maps.put(key, initial, map)
- { :ok, val } ->
+ {:ok, val} ->
:maps.put(key, fun.(val), map)
end
end
diff --git a/lib/elixir/lib/module.ex b/lib/elixir/lib/module.ex
index 966a1d6d7..87055a6f6 100644
--- a/lib/elixir/lib/module.ex
+++ b/lib/elixir/lib/module.ex
@@ -24,7 +24,7 @@ defmodule Module do
A hook that will be invoked right after the current module is compiled.
- Accepts a module or a tuple `{ <module>, <function atom> }`. The function
+ Accepts a module or a tuple `{<module>, <function atom>}`. The function
must take two arguments: the module environment and its bytecode.
When just a module is provided, the function is assumed to be
`__after_compile__/2`.
@@ -43,7 +43,7 @@ defmodule Module do
A hook that will be invoked before the module is compiled.
- Accepts a module or a tuple `{ <module>, <function/macro atom> }`. The
+ Accepts a module or a tuple `{<module>, <function/macro atom>}`. The
function/macro must take one argument: the module environment. If it's a
macro, its returned value will be injected at the end of the module definition
before the compilation starts.
@@ -94,7 +94,7 @@ defmodule Module do
### Example
defmodule M do
- @compile { :inline, myfun: 1 }
+ @compile {:inline, myfun: 1}
def myfun(arg) do
to_string(arg)
@@ -167,7 +167,7 @@ defmodule Module do
A hook that will be invoked when each function or macro in the current
module is defined. Useful when annotating functions.
- Accepts a module or a tuple `{ <module>, <function atom> }`. The function
+ Accepts a module or a tuple `{<module>, <function atom>}`. The function
must take 6 arguments:
- the module environment
@@ -206,7 +206,7 @@ defmodule Module do
end
defmodule M do
- @on_definition { H, :on_def }
+ @on_definition {H, :on_def}
def hello(arg) when is_binary(arg) or is_list(arg) do
"Hello" <> to_string(arg)
@@ -293,7 +293,7 @@ defmodule Module do
* `:docs` - list of all docstrings attached to functions and macros
using the `@doc` attribute
- * `:moduledoc` - tuple `{ <line>, <doc> }` where `line` is the line on
+ * `:moduledoc` - tuple `{<line>, <doc>}` where `line` is the line on
which module definition starts and `doc` is the string
attached to the module using the `@moduledoc` attribute
@@ -355,9 +355,9 @@ defmodule Module do
def eval_quoted(module, quoted, binding, opts) do
assert_not_compiled!(:eval_quoted, module)
:elixir_def.reset_last(module)
- { value, binding, _env, _scope } =
+ {value, binding, _env, _scope} =
:elixir.eval_quoted quoted, binding, Keyword.put(opts, :module, module)
- { value, binding }
+ {value, binding}
end
@doc """
@@ -497,7 +497,7 @@ defmodule Module do
## Examples
defmodule MyModule do
- Module.add_doc(__MODULE__, __ENV__.line + 1, :def, { :version, 0 }, [], "Manually added docs")
+ Module.add_doc(__MODULE__, __ENV__.line + 1, :def, {:version, 0}, [], "Manually added docs")
def version, do: 1
end
@@ -505,7 +505,7 @@ defmodule Module do
def add_doc(module, line, kind, tuple, signature \\ [], doc)
def add_doc(_module, _line, kind, _tuple, _signature, doc) when kind in [:defp, :defmacrop, :typep] do
- if doc, do: { :error, :private_doc }, else: :ok
+ if doc, do: {:error, :private_doc}, else: :ok
end
def add_doc(module, line, kind, tuple, signature, doc) when
@@ -513,56 +513,56 @@ defmodule Module do
assert_not_compiled!(:add_doc, module)
table = docs_table_for(module)
- { signature, _ } = Enum.map_reduce signature, 1, fn(x, acc) ->
- { simplify_signature(x, acc), acc + 1 }
+ {signature, _} = Enum.map_reduce signature, 1, fn(x, acc) ->
+ {simplify_signature(x, acc), acc + 1}
end
case :ets.lookup(table, tuple) do
[] ->
- :ets.insert(table, { tuple, line, kind, signature, doc })
+ :ets.insert(table, {tuple, line, kind, signature, doc})
:ok
- [{ tuple, line, _old_kind, old_sign, old_doc }] ->
+ [{tuple, line, _old_kind, old_sign, old_doc}] ->
:ets.insert(table, {
tuple,
line,
kind,
merge_signatures(old_sign, signature, 1),
if(nil?(doc), do: old_doc, else: doc)
- })
+ })
:ok
end
end
# Simplify signatures to be stored in docs
- defp simplify_signature({ :\\, _, [left, right ] }, i) do
- { :\\, [], [simplify_signature(left, i), right] }
+  defp simplify_signature({:\\, _, [left, right]}, i) do
+ {:\\, [], [simplify_signature(left, i), right]}
end
- defp simplify_signature({ :%, _, [left, _] }, _i) when is_atom(left) do
+ defp simplify_signature({:%, _, [left, _]}, _i) when is_atom(left) do
last = List.last(String.split(atom_to_binary(left), "."))
atom = binary_to_atom(String.downcase(last))
- { atom, [], nil }
+ {atom, [], nil}
end
- defp simplify_signature({ :=, _, [_, right] }, i) do
+ defp simplify_signature({:=, _, [_, right]}, i) do
simplify_signature(right, i)
end
- defp simplify_signature({ var, _, atom }, _i) when is_atom(atom) do
+ defp simplify_signature({var, _, atom}, _i) when is_atom(atom) do
case atom_to_binary(var) do
- "_" <> rest -> { binary_to_atom(rest), [], Elixir }
- _ -> { var, [], nil }
+ "_" <> rest -> {binary_to_atom(rest), [], Elixir}
+ _ -> {var, [], nil}
end
end
- defp simplify_signature(other, i) when is_integer(other), do: { :"int#{i}", [], Elixir }
- defp simplify_signature(other, i) when is_boolean(other), do: { :"bool#{i}", [], Elixir }
- defp simplify_signature(other, i) when is_atom(other), do: { :"atom#{i}", [], Elixir }
- defp simplify_signature(other, i) when is_list(other), do: { :"list#{i}", [], Elixir }
- defp simplify_signature(other, i) when is_float(other), do: { :"float#{i}", [], Elixir }
- defp simplify_signature(other, i) when is_binary(other), do: { :"binary#{i}", [], Elixir }
- defp simplify_signature(_, i), do: { :"arg#{i}", [], Elixir }
+ defp simplify_signature(other, i) when is_integer(other), do: {:"int#{i}", [], Elixir}
+ defp simplify_signature(other, i) when is_boolean(other), do: {:"bool#{i}", [], Elixir}
+ defp simplify_signature(other, i) when is_atom(other), do: {:"atom#{i}", [], Elixir}
+ defp simplify_signature(other, i) when is_list(other), do: {:"list#{i}", [], Elixir}
+ defp simplify_signature(other, i) when is_float(other), do: {:"float#{i}", [], Elixir}
+ defp simplify_signature(other, i) when is_binary(other), do: {:"binary#{i}", [], Elixir}
+ defp simplify_signature(_, i), do: {:"arg#{i}", [], Elixir}
# Merge
@@ -574,23 +574,23 @@ defmodule Module do
[]
end
- defp merge_signature({ :\\, line, [left, right] }, newer, i) do
- { :\\, line, [merge_signature(left, newer, i), right] }
+ defp merge_signature({:\\, line, [left, right]}, newer, i) do
+ {:\\, line, [merge_signature(left, newer, i), right]}
end
- defp merge_signature(older, { :\\, _, [left, _] }, i) do
+ defp merge_signature(older, {:\\, _, [left, _]}, i) do
merge_signature(older, left, i)
end
# The older signature, when given, always have higher precedence
- defp merge_signature({ _, _, nil } = older, _newer, _), do: older
- defp merge_signature(_older, { _, _, nil } = newer, _), do: newer
+ defp merge_signature({_, _, nil} = older, _newer, _), do: older
+ defp merge_signature(_older, {_, _, nil} = newer, _), do: newer
# Both are a guess, so check if they are the same guess
- defp merge_signature({ var, _, _ } = older, { var, _, _ }, _), do: older
+ defp merge_signature({var, _, _} = older, {var, _, _}, _), do: older
# Otherwise, returns a generic guess
- defp merge_signature({ _, line, _ }, _newer, i), do: { :"arg#{i}", line, Elixir }
+ defp merge_signature({_, line, _}, _newer, i), do: {:"arg#{i}", line, Elixir}
@doc """
Checks if the module defines the given function or macro.
@@ -599,9 +599,9 @@ defmodule Module do
## Examples
defmodule Example do
- Module.defines? __MODULE__, { :version, 0 } #=> false
+ Module.defines? __MODULE__, {:version, 0} #=> false
def version, do: 1
- Module.defines? __MODULE__, { :version, 0 } #=> true
+ Module.defines? __MODULE__, {:version, 0} #=> true
end
"""
@@ -619,9 +619,9 @@ defmodule Module do
## Examples
defmodule Example do
- Module.defines? __MODULE__, { :version, 0 }, :defp #=> false
+ Module.defines? __MODULE__, {:version, 0}, :defp #=> false
def version, do: 1
- Module.defines? __MODULE__, { :version, 0 }, :defp #=> false
+ Module.defines? __MODULE__, {:version, 0}, :defp #=> false
end
"""
@@ -629,7 +629,7 @@ defmodule Module do
assert_not_compiled!(:defines?, module)
table = function_table_for(module)
case :ets.lookup(table, tuple) do
- [{ _, ^kind, _, _, _, _, _ }] -> true
+ [{_, ^kind, _, _, _, _, _}] -> true
_ -> false
end
end
@@ -648,7 +648,7 @@ defmodule Module do
def definitions_in(module) do
assert_not_compiled!(:definitions_in, module)
table = function_table_for(module)
- for { tuple, _, _, _, _, _, _ } <- :ets.tab2list(table), do: tuple
+ for {tuple, _, _, _, _, _, _} <- :ets.tab2list(table), do: tuple
end
@doc """
@@ -667,7 +667,7 @@ defmodule Module do
def definitions_in(module, kind) do
assert_not_compiled!(:definitions_in, module)
table = function_table_for(module)
- for { tuple, stored_kind, _, _, _, _, _ } <- :ets.tab2list(table), stored_kind == kind, do: tuple
+ for {tuple, stored_kind, _, _, _, _, _} <- :ets.tab2list(table), stored_kind == kind, do: tuple
end
@doc """
@@ -682,7 +682,7 @@ defmodule Module do
for tuple <- tuples do
case :elixir_def.lookup_definition(module, tuple) do
false ->
- { name, arity } = tuple
+ {name, arity} = tuple
raise "Cannot make function #{name}/#{arity} overridable because it was not defined"
clause ->
:elixir_def.delete_definition(module, tuple)
@@ -694,9 +694,9 @@ defmodule Module do
end
old = get_attribute(module, :__overridable)
- merged = :orddict.update(tuple, fn({ count, _, _, _ }) ->
- { count + 1, clause, neighbours, false }
- end, { 1, clause, neighbours, false }, old)
+ merged = :orddict.update(tuple, fn({count, _, _, _}) ->
+ {count + 1, clause, neighbours, false}
+ end, {1, clause, neighbours, false}, old)
put_attribute(module, :__overridable, merged)
end
@@ -738,7 +738,7 @@ defmodule Module do
value
end
- :ets.insert(table, { key, new })
+ :ets.insert(table, {key, new})
end
@doc """
@@ -860,12 +860,12 @@ defmodule Module do
if Keyword.get(opts, :persist) do
old = :ets.lookup_element(table, :__persisted_attributes, 2)
- :ets.insert(table, { :__persisted_attributes, [new|old] })
+ :ets.insert(table, {:__persisted_attributes, [new|old]})
end
if Keyword.get(opts, :accumulate) do
old = :ets.lookup_element(table, :__acc_attributes, 2)
- :ets.insert(table, { :__acc_attributes, [new|old] })
+ :ets.insert(table, {:__acc_attributes, [new|old]})
end
end
@@ -889,13 +889,13 @@ defmodule Module do
module = env.module
line = env.line
arity = length(args)
- pair = { name, arity }
+ pair = {name, arity}
doc = get_attribute(module, :doc)
case add_doc(module, line, kind, pair, args, doc) do
:ok ->
:ok
- { :error, :private_doc } ->
+ {:error, :private_doc} ->
:elixir_errors.warn line, env.file, "function #{name}/#{arity} is private, @doc's are always discarded for private functions\n"
end
@@ -915,13 +915,13 @@ defmodule Module do
[] -> [value]
end
- :ets.insert(table, { key, new })
+ :ets.insert(table, {key, new})
end
## Helpers
defp normalize_attribute(:on_load, atom) when is_atom(atom) do
- { atom, 0 }
+ {atom, 0}
end
defp normalize_attribute(:behaviour, atom) when is_atom(atom) do
@@ -935,7 +935,7 @@ defmodule Module do
defp normalize_attribute(key, atom) when is_atom(atom) and
key in [:before_compile, :after_compile, :on_definition] do
- { atom, :"__#{key}__" }
+ {atom, :"__#{key}__"}
end
defp normalize_attribute(key, _value) when key in [:type, :typep, :export_type, :opaque, :callback] do
diff --git a/lib/elixir/lib/module/locals_tracker.ex b/lib/elixir/lib/module/locals_tracker.ex
index 8160ed16f..2a2da62ad 100644
--- a/lib/elixir/lib/module/locals_tracker.ex
+++ b/lib/elixir/lib/module/locals_tracker.ex
@@ -12,21 +12,21 @@
# We also have can the following vertices:
#
# * `Module` - a module that was invoked via an import
-# * `{ name, arity }` - a local function/arity pair
-# * `{ :import, name, arity }` - an invoked function/arity import
+# * `{name, arity}` - a local function/arity pair
+# * `{:import, name, arity}` - an invoked function/arity import
#
# Each of those vertices can associate to other vertices
# as described below:
#
# * `Module`
-# * in neighbours: `{ :import, name, arity }`
+# * in neighbours: `{:import, name, arity}`
#
-# * `{ name, arity }`
-# * in neighbours: `:local`, `{ name, arity }`
-# * out neighbours: `{ :import, name, arity }`
+# * `{name, arity}`
+# * in neighbours: `:local`, `{name, arity}`
+# * out neighbours: `{:import, name, arity}`
#
-# * `{ :import, name, arity }`
-# * in neighbours: `{ name, arity }`
+# * `{:import, name, arity}`
+# * in neighbours: `{name, arity}`
# * out neighbours: `Module`
#
# Note that since this is required for bootstrap, we can't use
@@ -39,21 +39,21 @@ defmodule Module.LocalsTracker do
@type ref :: pid | module
@type name :: atom
- @type name_arity :: { name, arity }
+ @type name_arity :: {name, arity}
- @type local :: { name, arity }
- @type import :: { :import, name, arity }
+ @type local :: {name, arity}
+ @type import :: {:import, name, arity}
# Public API
@doc """
Returns all imported modules that had the given
- `{ name, arity }` invoked.
+ `{name, arity}` invoked.
"""
@spec imports_with_dispatch(ref, name_arity) :: [module]
- def imports_with_dispatch(ref, { name, arity }) do
+ def imports_with_dispatch(ref, {name, arity}) do
d = :gen_server.call(to_pid(ref), :digraph, @timeout)
- :digraph.out_neighbours(d, { :import, name, arity })
+ :digraph.out_neighbours(d, {:import, name, arity})
end
@doc """
@@ -71,7 +71,7 @@ defmodule Module.LocalsTracker do
defp reduce_reachable(d, vertex, vertices) do
neighbours = :digraph.out_neighbours(d, vertex)
- neighbours = (for { _, _ } = t <- neighbours, do: t) |> :ordsets.from_list
+ neighbours = (for {_, _} = t <- neighbours, do: t) |> :ordsets.from_list
remaining = :ordsets.subtract(neighbours, vertices)
vertices = :ordsets.union(neighbours, vertices)
:lists.foldl(&reduce_reachable(d, &1, &2), vertices, remaining)
@@ -80,7 +80,7 @@ defmodule Module.LocalsTracker do
defp to_pid(pid) when is_pid(pid), do: pid
defp to_pid(mod) when is_atom(mod) do
table = :elixir_module.data_table(mod)
- [{ _, val }] = :ets.lookup(table, :__locals_tracker)
+ [{_, val}] = :ets.lookup(table, :__locals_tracker)
val
end
@@ -89,7 +89,7 @@ defmodule Module.LocalsTracker do
# Starts the tracker and returns its pid.
@doc false
def start_link do
- { :ok, pid } = :gen_server.start_link(__MODULE__, [], [])
+ {:ok, pid} = :gen_server.start_link(__MODULE__, [], [])
pid
end
@@ -99,36 +99,36 @@ defmodule Module.LocalsTracker do
# a call is made to.
@doc false
def add_definition(pid, kind, tuple) when kind in [:def, :defp, :defmacro, :defmacrop] do
- :gen_server.cast(pid, { :add_definition, kind, tuple })
+ :gen_server.cast(pid, {:add_definition, kind, tuple})
end
# Adds and tracks defaults for a definition into the tracker.
@doc false
def add_defaults(pid, kind, tuple, defaults) when kind in [:def, :defp, :defmacro, :defmacrop] do
- :gen_server.cast(pid, { :add_defaults, kind, tuple, defaults })
+ :gen_server.cast(pid, {:add_defaults, kind, tuple, defaults})
end
# Adds a local dispatch to the given target.
def add_local(pid, to) when is_tuple(to) do
- :gen_server.cast(pid, { :add_local, :local, to })
+ :gen_server.cast(pid, {:add_local, :local, to})
end
# Adds a local dispatch from-to the given target.
@doc false
def add_local(pid, from, to) when is_tuple(from) and is_tuple(to) do
- :gen_server.cast(pid, { :add_local, from, to })
+ :gen_server.cast(pid, {:add_local, from, to})
end
# Adds a import dispatch to the given target.
@doc false
def add_import(pid, function, module, target) when is_atom(module) and is_tuple(target) do
- :gen_server.cast(pid, { :add_import, function, module, target })
+ :gen_server.cast(pid, {:add_import, function, module, target})
end
# Yanks a local node. Returns its in and out vertices in a tuple.
@doc false
def yank(pid, local) do
- :gen_server.call(to_pid(pid), { :yank, local }, @timeout)
+ :gen_server.call(to_pid(pid), {:yank, local}, @timeout)
end
# Reattach a previously yanked node
@@ -136,7 +136,7 @@ defmodule Module.LocalsTracker do
def reattach(pid, kind, tuple, neighbours) do
pid = to_pid(pid)
add_definition(pid, kind, tuple)
- :gen_server.cast(pid, { :reattach, tuple, neighbours })
+ :gen_server.cast(pid, {:reattach, tuple, neighbours})
end
# Collecting all conflicting imports with the given functions
@@ -144,11 +144,11 @@ defmodule Module.LocalsTracker do
def collect_imports_conflicts(pid, all_defined) do
d = :gen_server.call(pid, :digraph, @timeout)
- for { name, arity } <- all_defined,
- :digraph.in_neighbours(d, { :import, name, arity }) != [],
- n = :digraph.out_neighbours(d, { :import, name, arity }),
+ for {name, arity} <- all_defined,
+ :digraph.in_neighbours(d, {:import, name, arity}) != [],
+ n = :digraph.out_neighbours(d, {:import, name, arity}),
n != [] do
- { n, name, arity }
+ {n, name, arity}
end
end
@@ -161,40 +161,40 @@ defmodule Module.LocalsTracker do
:lists.foldl(&collect_unused_locals(&1, &2, reachable), [], private)
end
- defp collect_unused_locals({ tuple, kind, 0 }, acc, reachable) do
+ defp collect_unused_locals({tuple, kind, 0}, acc, reachable) do
if :lists.member(tuple, reachable) do
acc
else
- [{ :unused_def, tuple, kind }|acc]
+ [{:unused_def, tuple, kind}|acc]
end
end
- defp collect_unused_locals({ tuple, kind, default }, acc, reachable) when default > 0 do
- { name, arity } = tuple
+ defp collect_unused_locals({tuple, kind, default}, acc, reachable) when default > 0 do
+ {name, arity} = tuple
min = arity - default
max = arity
- invoked = for { n, a } <- reachable, n == name, a in min..max, do: a
+ invoked = for {n, a} <- reachable, n == name, a in min..max, do: a
if invoked == [] do
- [{ :unused_def, tuple, kind }|acc]
+ [{:unused_def, tuple, kind}|acc]
else
case :lists.min(invoked) - min do
0 -> acc
- ^default -> [{ :unused_args, tuple }|acc]
- unused_args -> [{ :unused_args, tuple, unused_args }|acc]
+ ^default -> [{:unused_args, tuple}|acc]
+ unused_args -> [{:unused_args, tuple, unused_args}|acc]
end
end
end
@doc false
def cache_env(pid, env) do
- :gen_server.call(pid, { :cache_env, env }, @timeout)
+ :gen_server.call(pid, {:cache_env, env}, @timeout)
end
@doc false
def get_cached_env(pid, ref) do
- :gen_server.call(pid, { :get_cached_env, ref }, @timeout)
+ :gen_server.call(pid, {:get_cached_env, ref}, @timeout)
end
# Stops the gen server
@@ -208,78 +208,78 @@ defmodule Module.LocalsTracker do
def init([]) do
d = :digraph.new([:protected])
:digraph.add_vertex(d, :local)
- { :ok, { d, [] } }
+ {:ok, {d, []}}
end
- def handle_call({ :cache_env, env }, _from, { d, cache }) do
+ def handle_call({:cache_env, env}, _from, {d, cache}) do
case cache do
[{i,^env}|_] ->
- { :reply, i, { d, cache } }
+ {:reply, i, {d, cache}}
t ->
i = length(t)
- { :reply, i, { d, [{i,env}|t] } }
+ {:reply, i, {d, [{i,env}|t]}}
end
end
- def handle_call({ :get_cached_env, ref }, _from, { _, cache } = state) do
- { ^ref, env } = :lists.keyfind(ref, 1, cache)
- { :reply, env, state }
+ def handle_call({:get_cached_env, ref}, _from, {_, cache} = state) do
+ {^ref, env} = :lists.keyfind(ref, 1, cache)
+ {:reply, env, state}
end
- def handle_call({ :yank, local }, _from, { d, _ } = state) do
+ def handle_call({:yank, local}, _from, {d, _} = state) do
in_vertices = :digraph.in_neighbours(d, local)
out_vertices = :digraph.out_neighbours(d, local)
:digraph.del_vertex(d, local)
- { :reply, { in_vertices, out_vertices }, state }
+ {:reply, {in_vertices, out_vertices}, state}
end
- def handle_call(:digraph, _from, { d, _ } = state) do
- { :reply, d, state }
+ def handle_call(:digraph, _from, {d, _} = state) do
+ {:reply, d, state}
end
def handle_call(request, _from, state) do
- { :stop, { :bad_call, request }, state }
+ {:stop, {:bad_call, request}, state}
end
def handle_info(_msg, state) do
- { :noreply, state }
+ {:noreply, state}
end
- def handle_cast({ :add_local, from, to }, { d, _ } = state) do
+ def handle_cast({:add_local, from, to}, {d, _} = state) do
handle_add_local(d, from, to)
- { :noreply, state }
+ {:noreply, state}
end
- def handle_cast({ :add_import, function, module, { name, arity } }, { d, _ } = state) do
+ def handle_cast({:add_import, function, module, {name, arity}}, {d, _} = state) do
handle_import(d, function, module, name, arity)
- { :noreply, state }
+ {:noreply, state}
end
- def handle_cast({ :add_definition, kind, tuple }, { d, _ } = state) do
+ def handle_cast({:add_definition, kind, tuple}, {d, _} = state) do
handle_add_definition(d, kind, tuple)
- { :noreply, state }
+ {:noreply, state}
end
- def handle_cast({ :add_defaults, kind, { name, arity }, defaults }, { d, _ } = state) do
+ def handle_cast({:add_defaults, kind, {name, arity}, defaults}, {d, _} = state) do
for i <- :lists.seq(arity - defaults, arity - 1) do
- handle_add_definition(d, kind, { name, i })
- handle_add_local(d, { name, i }, { name, i + 1 })
+ handle_add_definition(d, kind, {name, i})
+ handle_add_local(d, {name, i}, {name, i + 1})
end
- { :noreply, state }
+ {:noreply, state}
end
- def handle_cast({ :reattach, tuple, { in_neigh, out_neigh } }, { d, _ } = state) do
+ def handle_cast({:reattach, tuple, {in_neigh, out_neigh}}, {d, _} = state) do
for from <- in_neigh, do: replace_edge(d, from, tuple)
for to <- out_neigh, do: replace_edge(d, tuple, to)
- { :noreply, state }
+ {:noreply, state}
end
def handle_cast(:stop, state) do
- { :stop, :normal, state }
+ {:stop, :normal, state}
end
def handle_cast(msg, state) do
- { :stop, { :bad_cast, msg }, state }
+ {:stop, {:bad_cast, msg}, state}
end
def terminate(_reason, _state) do
@@ -287,13 +287,13 @@ defmodule Module.LocalsTracker do
end
def code_change(_old, state, _extra) do
- { :ok, state }
+ {:ok, state}
end
defp handle_import(d, function, module, name, arity) do
:digraph.add_vertex(d, module)
- tuple = { :import, name, arity }
+ tuple = {:import, name, arity}
:digraph.add_vertex(d, tuple)
replace_edge!(d, tuple, module)
diff --git a/lib/elixir/lib/option_parser.ex b/lib/elixir/lib/option_parser.ex
index a4360b768..c6da8cb3b 100644
--- a/lib/elixir/lib/option_parser.ex
+++ b/lib/elixir/lib/option_parser.ex
@@ -10,13 +10,13 @@ defmodule OptionParser do
## Examples
iex> OptionParser.parse(["--debug"])
- { [debug: true], [], [] }
+ {[debug: true], [], []}
iex> OptionParser.parse(["--source", "lib"])
- { [source: "lib"], [], [] }
+ {[source: "lib"], [], []}
iex> OptionParser.parse(["--source-path", "lib", "test/enum_test.exs", "--verbose"])
- { [source_path: "lib", verbose: true], ["test/enum_test.exs"], [] }
+ {[source_path: "lib", verbose: true], ["test/enum_test.exs"], []}
Notice how Elixir automatically translates the "--source-path"
switch to the underscored atom `:source_path`, which better follows
@@ -27,7 +27,7 @@ defmodule OptionParser do
A set of aliases can be given as the second argument:
iex> OptionParser.parse(["-d"], aliases: [d: :debug])
- { [debug: true], [], [] }
+ {[debug: true], [], []}
## Switches
@@ -54,17 +54,17 @@ defmodule OptionParser do
Examples:
iex> OptionParser.parse(["--unlock", "path/to/file"], switches: [unlock: :boolean])
- { [unlock: true], ["path/to/file"], [] }
+ {[unlock: true], ["path/to/file"], []}
iex> OptionParser.parse(["--unlock", "--limit", "0", "path/to/file"],
...> switches: [unlock: :boolean, limit: :integer])
- { [unlock: true, limit: 0], ["path/to/file"], [] }
+ {[unlock: true, limit: 0], ["path/to/file"], []}
iex> OptionParser.parse(["--limit", "3"], switches: [limit: :integer])
- { [limit: 3], [], [] }
+ {[limit: 3], [], []}
iex> OptionParser.parse(["--limit", "yyz"], switches: [limit: :integer])
- { [], [], [limit: "yyz"] }
+ {[], [], [limit: "yyz"]}
## Negation switches
@@ -72,12 +72,12 @@ defmodule OptionParser do
booleans and never parse the next value:
iex> OptionParser.parse(["--no-op", "path/to/file"])
- { [no_op: true], ["path/to/file"], [] }
+ {[no_op: true], ["path/to/file"], []}
In case the negated switch exists as a boolean, it sets the boolean to false:
iex> OptionParser.parse(["--no-op", "path/to/file"], switches: [op: :boolean])
- { [op: false], ["path/to/file"], [] }
+ {[op: false], ["path/to/file"], []}
"""
def parse(argv, opts \\ []) when is_list(argv) and is_list(opts) do
@@ -93,10 +93,10 @@ defmodule OptionParser do
## Example
iex> OptionParser.parse_head(["--source", "lib", "test/enum_test.exs", "--verbose"])
- { [source: "lib"], ["test/enum_test.exs", "--verbose"], [] }
+ {[source: "lib"], ["test/enum_test.exs", "--verbose"], []}
iex> OptionParser.parse_head(["--verbose", "--source", "lib", "test/enum_test.exs", "--unlock"])
- { [verbose: true, source: "lib"], ["test/enum_test.exs", "--unlock"], [] }
+ {[verbose: true, source: "lib"], ["test/enum_test.exs", "--unlock"], []}
"""
def parse_head(argv, opts \\ []) when is_list(argv) and is_list(opts) do
@@ -116,23 +116,23 @@ defmodule OptionParser do
end
defp parse(["--"|_] = value, _aliases, _switches, dict, _args, invalid, _all) do
- { Enum.reverse(dict), value, Enum.reverse(invalid) }
+ {Enum.reverse(dict), value, Enum.reverse(invalid)}
end
defp parse(["-" <> option|t], aliases, switches, dict, args, invalid, all) do
- { option, value } = split_option(option)
- { option, kinds, value } = normalize_option(option, value, switches, aliases)
+ {option, value} = split_option(option)
+ {option, kinds, value} = normalize_option(option, value, switches, aliases)
if nil?(value) do
- { value, t } =
+ {value, t} =
if :boolean in kinds do
- { true, t }
+ {true, t}
else
value_from_tail(t)
end
end
- { dict, invalid } = store_option(dict, invalid, option, value, kinds)
+ {dict, invalid} = store_option(dict, invalid, option, value, kinds)
parse(t, aliases, switches, dict, args, invalid, all)
end
@@ -142,51 +142,51 @@ defmodule OptionParser do
end
defp parse([], _, _switches, dict, args, invalid, true) do
- { Enum.reverse(dict), Enum.reverse(args), Enum.reverse(invalid) }
+ {Enum.reverse(dict), Enum.reverse(args), Enum.reverse(invalid)}
end
defp parse(value, _, _switches, dict, _args, invalid, false) do
- { Enum.reverse(dict), value, Enum.reverse(invalid) }
+ {Enum.reverse(dict), value, Enum.reverse(invalid)}
end
- defp value_from_tail(["-" <> _|_] = t), do: { true, t }
- defp value_from_tail([h|t]), do: { h, t }
- defp value_from_tail([]), do: { true, [] }
+ defp value_from_tail(["-" <> _|_] = t), do: {true, t}
+ defp value_from_tail([h|t]), do: {h, t}
+ defp value_from_tail([]), do: {true, []}
defp store_option(dict, invalid, option, value, kinds) do
- { invalid_option, value } =
+ {invalid_option, value} =
cond do
:invalid in kinds ->
- { option, value }
+ {option, value}
:boolean in kinds ->
- { nil, value in [true, "true"] }
+ {nil, value in [true, "true"]}
:integer in kinds ->
case Integer.parse(value) do
- { value, "" } -> { nil, value }
- _ -> { option, value }
+ {value, ""} -> {nil, value}
+ _ -> {option, value}
end
:float in kinds ->
case Float.parse(value) do
- { value, "" } -> { nil, value }
- _ -> { option, value }
+ {value, ""} -> {nil, value}
+ _ -> {option, value}
end
true ->
- { nil, value }
+ {nil, value}
end
if invalid_option do
- { dict, [{ option, value }|invalid] }
+ {dict, [{option, value}|invalid]}
else
- { do_store_option(dict, option, value, kinds), invalid }
+ {do_store_option(dict, option, value, kinds), invalid}
end
end
defp do_store_option(dict, option, value, kinds) do
cond do
:keep in kinds ->
- [{ option, value }|dict]
+ [{option, value}|dict]
true ->
- [{ option, value }|Keyword.delete(dict, option)]
+ [{option, value}|Keyword.delete(dict, option)]
end
end
@@ -197,30 +197,30 @@ defmodule OptionParser do
defp normalize_option(option, value, switches, aliases) do
option = get_option(option)
if alias = aliases[option] do
- normalize_option({ :default, alias }, value, switches)
+ normalize_option({:default, alias}, value, switches)
else
- { option, [:invalid], value }
+ {option, [:invalid], value}
end
end
- defp normalize_option({ :negated, option }, _value, switches) do
+ defp normalize_option({:negated, option}, _value, switches) do
kinds = List.wrap(switches[option])
if :boolean in kinds do
- { option, kinds, false }
+ {option, kinds, false}
else
- { option, [:boolean], true }
+ {option, [:boolean], true}
end
end
- defp normalize_option({ :default, option }, value, switches) do
- { option, List.wrap(switches[option]), value }
+ defp normalize_option({:default, option}, value, switches) do
+ {option, List.wrap(switches[option]), value}
end
defp split_option(option) do
case :binary.split(option, "=") do
- [h] -> { h, nil }
- [h, t] -> { h, t }
+ [h] -> {h, nil}
+ [h, t] -> {h, t}
end
end
@@ -235,10 +235,10 @@ defmodule OptionParser do
defp get_negated("no-" <> rest = option, switches) do
negated = get_option(rest)
option = if Keyword.has_key?(switches, negated), do: negated, else: get_option(option)
- { :negated, option }
+ {:negated, option}
end
defp get_negated(rest, _switches) do
- { :default, get_option(rest) }
+ {:default, get_option(rest)}
end
end
diff --git a/lib/elixir/lib/path.ex b/lib/elixir/lib/path.ex
index 6b1dbf134..38484c25a 100644
--- a/lib/elixir/lib/path.ex
+++ b/lib/elixir/lib/path.ex
@@ -149,7 +149,7 @@ defmodule Path do
@spec type(t) :: :absolute | :relative | :volumerelative
def type(name) when is_list(name) or is_binary(name) do
case :os.type() do
- { :win32, _ } -> win32_pathtype(name)
+ {:win32, _} -> win32_pathtype(name)
_ -> unix_pathtype(name)
end |> elem(0)
end
@@ -174,19 +174,19 @@ defmodule Path do
@spec relative(t) :: binary
def relative(name) do
case :os.type() do
- { :win32, _ } -> win32_pathtype(name)
+ {:win32, _} -> win32_pathtype(name)
_ -> unix_pathtype(name)
end |> elem(1) |> String.from_char_data!
end
defp unix_pathtype(<<?/, relative :: binary>>), do:
- { :absolute, relative }
+ {:absolute, relative}
defp unix_pathtype([?/|relative]), do:
- { :absolute, relative }
+ {:absolute, relative}
defp unix_pathtype([list|rest]) when is_list(list), do:
unix_pathtype(list ++ rest)
defp unix_pathtype(relative), do:
- { :relative, relative }
+ {:relative, relative}
@slash [?/, ?\\]
@@ -195,26 +195,26 @@ defmodule Path do
defp win32_pathtype([char, list|rest]) when is_list(list), do:
win32_pathtype([char|list++rest])
defp win32_pathtype(<<c1, c2, relative :: binary>>) when c1 in @slash and c2 in @slash, do:
- { :absolute, relative }
+ {:absolute, relative}
defp win32_pathtype(<<c, relative :: binary>>) when c in @slash, do:
- { :volumerelative, relative }
+ {:volumerelative, relative}
defp win32_pathtype(<<_letter, ?:, c, relative :: binary>>) when c in @slash, do:
- { :absolute, relative }
+ {:absolute, relative}
defp win32_pathtype(<<_letter, ?:, relative :: binary>>), do:
- { :volumerelative, relative }
+ {:volumerelative, relative}
defp win32_pathtype([c1, c2 | relative]) when c1 in @slash and c2 in @slash, do:
- { :absolute, relative }
+ {:absolute, relative}
defp win32_pathtype([c | relative]) when c in @slash, do:
- { :volumerelative, relative }
+ {:volumerelative, relative}
defp win32_pathtype([c1, c2, list|rest]) when is_list(list), do:
win32_pathtype([c1, c2|list++rest])
defp win32_pathtype([_letter, ?:, c | relative]) when c in @slash, do:
- { :absolute, relative }
+ {:absolute, relative}
defp win32_pathtype([_letter, ?: | relative]), do:
- { :volumerelative, relative }
+ {:volumerelative, relative}
defp win32_pathtype(relative), do:
- { :relative, relative }
+ {:relative, relative}
@doc """
Returns the given `path` relative to the given `from` path.
@@ -263,7 +263,7 @@ defmodule Path do
@spec relative_to_cwd(t) :: binary
def relative_to_cwd(path) do
case :file.get_cwd do
- { :ok, base } -> relative_to(path, String.from_char_data!(base))
+ {:ok, base} -> relative_to(path, String.from_char_data!(base))
_ -> path
end
end
diff --git a/lib/elixir/lib/process.ex b/lib/elixir/lib/process.ex
index 2afae3866..26527987a 100644
--- a/lib/elixir/lib/process.ex
+++ b/lib/elixir/lib/process.ex
@@ -117,7 +117,7 @@ defmodule Process do
It does the same as `Kernel.send/2`.
"""
@spec send(dest, msg) :: msg when
- dest: pid | port | atom | { atom, node },
+ dest: pid | port | atom | {atom, node},
msg: any
def send(dest, msg) do
:erlang.send(dest, msg)
@@ -137,12 +137,12 @@ defmodule Process do
## Examples
- iex> Process.send({ :name, :node_does_not_exist }, :hi, [:noconnect])
+ iex> Process.send({:name, :node_does_not_exist}, :hi, [:noconnect])
:noconnect
"""
@spec send(dest, msg, [option]) :: result when
- dest: pid | port | atom | { atom, node },
+ dest: pid | port | atom | {atom, node},
msg: any,
option: :noconnect | :nosuspend,
result: :ok | :noconnect | :nosuspend
@@ -457,7 +457,7 @@ defmodule Process do
nillify :erlang.process_info(pid, spec)
end
- @compile { :inline, nillify: 1 }
+ @compile {:inline, nillify: 1}
defp nillify(:undefined), do: nil
defp nillify(other), do: other
end
diff --git a/lib/elixir/lib/protocol.ex b/lib/elixir/lib/protocol.ex
index 53be038ca..a97ed16bd 100644
--- a/lib/elixir/lib/protocol.ex
+++ b/lib/elixir/lib/protocol.ex
@@ -34,7 +34,7 @@ defmodule Protocol do
defp after_defprotocol do
quote unquote: false do
# == Deprecated records handling ==
- { arg, impl } = Protocol.rec_impl_for(__MODULE__)
+ {arg, impl} = Protocol.rec_impl_for(__MODULE__)
Kernel.def impl_for(unquote(arg)) when Kernel.is_record(unquote(arg)), do: unquote(impl)
# == Deprecated records handling ==
@@ -45,12 +45,12 @@ defmodule Protocol do
#
# It simply delegates to struct_impl_for which is then
# optimized during protocol consolidation.
- Kernel.def impl_for(%{ __struct__: struct }) when :erlang.is_atom(struct) do
+ Kernel.def impl_for(%{__struct__: struct}) when :erlang.is_atom(struct) do
struct_impl_for(struct)
end
# Define the implementation for builtins.
- for { guard, mod } <- Protocol.builtin do
+ for {guard, mod} <- Protocol.builtin do
target = Module.concat(__MODULE__, mod)
Kernel.def impl_for(data) when :erlang.unquote(guard)(data) do
@@ -94,9 +94,9 @@ defmodule Protocol do
end
# Inline any and struct implementations
- @compile { :inline, any_impl_for: 0, struct_impl_for: 1, impl_for?: 1 }
+ @compile {:inline, any_impl_for: 0, struct_impl_for: 1, impl_for?: 1}
- if :code.ensure_loaded(Kernel.Typespec) == { :module, Kernel.Typespec } and
+ if :code.ensure_loaded(Kernel.Typespec) == {:module, Kernel.Typespec} and
not Kernel.Typespec.defines_type?(__MODULE__, :t, 0) do
@type t :: term
end
@@ -153,7 +153,7 @@ defmodule Protocol do
@doc false
def assert_protocol(module) do
case Code.ensure_compiled(module) do
- { :module, ^module } -> nil
+ {:module, ^module} -> nil
_ -> raise ArgumentError, message: "#{inspect module} is not loaded"
end
@@ -185,7 +185,7 @@ defmodule Protocol do
# returns the module to dispatch to.
@doc false
def rec_impl_for(current) do
- all = [Any] ++ for { _guard, mod } <- builtin, do: mod
+ all = [Any] ++ for {_guard, mod} <- builtin, do: mod
arg = quote do: arg
target = Module.concat(current, Tuple)
@@ -211,7 +211,7 @@ defmodule Protocol do
end
end
- { arg, impl_for }
+ {arg, impl_for}
end
end
@@ -219,18 +219,18 @@ defmodule Protocol.DSL do
@moduledoc false
@doc false
- defmacro def({ _, _, args }) when args == [] or is_atom(args) do
+ defmacro def({_, _, args}) when args == [] or is_atom(args) do
raise ArgumentError, message: "protocol functions expect at least one argument"
end
- defmacro def({ name, _, args }) when is_atom(name) and is_list(args) do
+ defmacro def({name, _, args}) when is_atom(name) and is_list(args) do
arity = length(args)
type_args = for _ <- :lists.seq(2, arity), do: quote(do: term)
type_args = [quote(do: t) | type_args]
call_args = for i <- :lists.seq(2, arity),
- do: { binary_to_atom(<<?x, i + 64>>), [], __MODULE__ }
+ do: {binary_to_atom(<<?x, i + 64>>), [], __MODULE__}
call_args = [quote(do: t) | call_args]
quote do
@@ -262,17 +262,17 @@ defmodule Protocol.DSL do
@doc false
def __spec__?(module, name, arity) do
case :code.ensure_loaded(Kernel.Typespec) do
- { :module, Kernel.Typespec } ->
- tuple = { name, arity }
+ {:module, Kernel.Typespec} ->
+ tuple = {name, arity}
specs = Module.get_attribute(module, :spec)
- found = for { k, v } <- specs, k == tuple do
+ found = for {k, v} <- specs, k == tuple do
Kernel.Typespec.define_callback(module, tuple, v)
true
end
found != []
- { :error, _ } ->
+ {:error, _} ->
true
end
end
diff --git a/lib/elixir/lib/protocol/consolidation.ex b/lib/elixir/lib/protocol/consolidation.ex
index ccd596d28..09d4f430b 100644
--- a/lib/elixir/lib/protocol/consolidation.ex
+++ b/lib/elixir/lib/protocol/consolidation.ex
@@ -70,7 +70,7 @@ defmodule Protocol.Consolidation do
defp list_dir(path) when is_list(path) do
case :file.list_dir(path) do
- { :ok, files } -> files
+ {:ok, files} -> files
_ -> []
end
end
@@ -85,7 +85,7 @@ defmodule Protocol.Consolidation do
defp extract_from_beam(file, callback) do
case :beam_lib.chunks(file, [:attributes]) do
- {:ok, { module, [attributes: attributes] } } ->
+ {:ok, {module, [attributes: attributes]}} ->
callback.(module, attributes)
_ ->
nil
@@ -95,7 +95,7 @@ defmodule Protocol.Consolidation do
defmacrop if_ok(expr, call) do
quote do
case unquote(expr) do
- { :ok, var } -> unquote(Macro.pipe(quote(do: var), call, 0))
+ {:ok, var} -> unquote(Macro.pipe(quote(do: var), call, 0))
other -> other
end
end
@@ -121,9 +121,9 @@ defmodule Protocol.Consolidation do
nor loads the new bytecode for the compiled module.
"""
@spec apply_to(module, [module]) ::
- { :ok, binary } |
- { :error, :not_a_protocol } |
- { :error, :no_beam_info }
+ {:ok, binary} |
+ {:error, :not_a_protocol} |
+ {:error, :no_beam_info}
def apply_to(protocol, types) when is_atom(protocol) do
raise ArgumentError, "consolidation is disabled as we can't consolidate records " <>
"and structs at once. Consolidation will be added back once " <>
@@ -136,16 +136,16 @@ defmodule Protocol.Consolidation do
# Ensure the given module is loaded and is a protocol.
defp ensure_protocol(protocol) do
case :beam_lib.chunks(beam_file(protocol), [:abstract_code, :attributes]) do
- { :ok, { ^protocol, [abstract_code: { _raw, abstract_code },
- attributes: attributes] } } ->
+ {:ok, {^protocol, [abstract_code: {_raw, abstract_code},
+ attributes: attributes]}} ->
case attributes[:protocol] do
[fallback_to_any: any, consolidated: _] ->
- { :ok, { protocol, any, abstract_code } }
+ {:ok, {protocol, any, abstract_code}}
_ ->
- { :error, :not_a_protocol }
+ {:error, :not_a_protocol}
end
_ ->
- { :error, :no_beam_info }
+ {:error, :no_beam_info}
end
end
@@ -158,23 +158,23 @@ defmodule Protocol.Consolidation do
# Change the debug information to the optimized
# impl_for/1 dispatch version.
- defp change_debug_info({ protocol, any, code }, types) do
+ defp change_debug_info({protocol, any, code}, types) do
types = if any, do: types, else: List.delete(types, Any)
- all = [Any] ++ for { _guard, mod } <- Protocol.builtin, do: mod
+ all = [Any] ++ for {_guard, mod} <- Protocol.builtin, do: mod
structs = types -- all
change_impl_for(code, protocol, types, structs, false, [])
end
- defp change_impl_for([{ :attribute, line, :protocol, opts }|t], protocol, types, structs, _, acc) do
+ defp change_impl_for([{:attribute, line, :protocol, opts}|t], protocol, types, structs, _, acc) do
opts = [fallback_to_any: opts[:fallback_to_any], consolidated: true]
change_impl_for(t, protocol, types, structs, true,
- [{ :attribute, line, :protocol, opts }|acc])
+ [{:attribute, line, :protocol, opts}|acc])
end
- defp change_impl_for([{ :function, line, :impl_for, 1, _ }|t], protocol, types, structs, is_protocol, acc) do
+ defp change_impl_for([{:function, line, :impl_for, 1, _}|t], protocol, types, structs, is_protocol, acc) do
fallback = if Any in types, do: Module.concat(protocol, Any), else: nil
- clauses = for { guard, mod } <- Protocol.builtin,
+ clauses = for {guard, mod} <- Protocol.builtin,
mod in types,
do: builtin_clause_for(mod, guard, protocol, line)
@@ -182,16 +182,16 @@ defmodule Protocol.Consolidation do
[fallback_clause_for(fallback, protocol, line)]
change_impl_for(t, protocol, types, structs, is_protocol,
- [{ :function, line, :impl_for, 1, clauses }|acc])
+ [{:function, line, :impl_for, 1, clauses}|acc])
end
- defp change_impl_for([{ :function, line, :struct_impl_for, 1, _ }|t], protocol, types, structs, is_protocol, acc) do
+ defp change_impl_for([{:function, line, :struct_impl_for, 1, _}|t], protocol, types, structs, is_protocol, acc) do
fallback = if Any in types, do: Module.concat(protocol, Any), else: nil
clauses = for struct <- structs, do: each_struct_clause_for(struct, protocol, line)
clauses = clauses ++ [fallback_clause_for(fallback, protocol, line)]
change_impl_for(t, protocol, types, structs, is_protocol,
- [{ :function, line, :struct_impl_for, 1, clauses }|acc])
+ [{:function, line, :struct_impl_for, 1, clauses}|acc])
end
defp change_impl_for([h|t], protocol, info, types, is_protocol, acc) do
@@ -200,9 +200,9 @@ defmodule Protocol.Consolidation do
defp change_impl_for([], protocol, _info, _types, is_protocol, acc) do
if is_protocol do
- { :ok, { protocol, Enum.reverse(acc) } }
+ {:ok, {protocol, Enum.reverse(acc)}}
else
- { :error, :not_a_protocol }
+ {:error, :not_a_protocol}
end
end
@@ -212,7 +212,7 @@ defmodule Protocol.Consolidation do
[[{:call, line,
{:remote, line, {:atom, line, :erlang}, {:atom, line, guard}},
[{:var, line, :x}],
- }]],
+ }]],
[{:atom, line, Module.concat(protocol, mod)}]}
end
@@ -224,7 +224,7 @@ defmodule Protocol.Consolidation do
[[{:call, line,
{:remote, line, {:atom, line, :erlang}, {:atom, line, :is_atom}},
[{:var, line, :x}],
- }]],
+ }]],
[{:call, line,
{:atom, line, :struct_impl_for},
[{:var, line, :x}]}]}
@@ -237,13 +237,13 @@ defmodule Protocol.Consolidation do
defp fallback_clause_for(value, _protocol, line) do
{:clause, line, [{:var, line, :_}], [],
- [{ :atom, line, value }]}
+ [{:atom, line, value}]}
end
# Finally compile the module and emit its bytecode.
- defp compile({ protocol, code }) do
+ defp compile({protocol, code}) do
opts = if Code.compiler_options[:debug_info], do: [:debug_info], else: []
- { :ok, ^protocol, binary, _warnings } = :compile.forms(code, [:return|opts])
- { :ok, binary }
+ {:ok, ^protocol, binary, _warnings} = :compile.forms(code, [:return|opts])
+ {:ok, binary}
end
end
diff --git a/lib/elixir/lib/range.ex b/lib/elixir/lib/range.ex
index 65162034a..51c856f27 100644
--- a/lib/elixir/lib/range.ex
+++ b/lib/elixir/lib/range.ex
@@ -3,27 +3,27 @@ defmodule Range do
Defines a Range.
"""
- @type t :: { Range, any, any }
- @type t(first, last) :: { Range, first, last }
+ @type t :: {Range, any, any}
+ @type t(first, last) :: {Range, first, last}
@doc """
Creates a new range.
"""
def new(first, last) do
- { Range, first, last }
+ {Range, first, last}
end
@doc """
Returns the first item of the range.
"""
- def first({ Range, first, _ }) do
+ def first({Range, first, _}) do
first
end
@doc """
Returns the last item of the range.
"""
- def last({ Range, _, last }) do
+ def last({Range, _, last}) do
last
end
@@ -39,7 +39,7 @@ defmodule Range do
false
"""
- def range?({ Range, _, _ }), do: true
+ def range?({Range, _, _}), do: true
def range?(_), do: false
end
@@ -60,36 +60,36 @@ defimpl Enumerable, for: Range do
reduce(first, last, acc, fun, Range.Iterator.next(first, range), last >= first)
end
- defp reduce(_x, _y, { :halt, acc }, _fun, _next, _up) do
- { :halted, acc }
+ defp reduce(_x, _y, {:halt, acc}, _fun, _next, _up) do
+ {:halted, acc}
end
- defp reduce(x, y, { :suspend, acc }, fun, next, up) do
- { :suspended, acc, &reduce(x, y, &1, fun, next, up) }
+ defp reduce(x, y, {:suspend, acc}, fun, next, up) do
+ {:suspended, acc, &reduce(x, y, &1, fun, next, up)}
end
- defp reduce(x, y, { :cont, acc }, fun, next, true) when x <= y do
+ defp reduce(x, y, {:cont, acc}, fun, next, true) when x <= y do
reduce(next.(x), y, fun.(x, acc), fun, next, true)
end
- defp reduce(x, y, { :cont, acc }, fun, next, false) when x >= y do
+ defp reduce(x, y, {:cont, acc}, fun, next, false) when x >= y do
reduce(next.(x), y, fun.(x, acc), fun, next, false)
end
- defp reduce(_, _, { :cont, acc }, _fun, _next, _up) do
- { :done, acc }
+ defp reduce(_, _, {:cont, acc}, _fun, _next, _up) do
+ {:done, acc}
end
def member?(first .. last, value) do
if first <= last do
- { :ok, first <= value and value <= last }
+ {:ok, first <= value and value <= last}
else
- { :ok, last <= value and value <= first }
+ {:ok, last <= value and value <= first}
end
end
def count(first .. _ = range) do
- { :ok, Range.Iterator.count(first, range) }
+ {:ok, Range.Iterator.count(first, range)}
end
end
diff --git a/lib/elixir/lib/record.ex b/lib/elixir/lib/record.ex
index 0b797c668..a0153926b 100644
--- a/lib/elixir/lib/record.ex
+++ b/lib/elixir/lib/record.ex
@@ -4,7 +4,7 @@ defmodule Record do
Records are simply tuples where the first element is an atom:
- iex> Record.record? { User, "jose", 27 }
+ iex> Record.record? {User, "jose", 27}
true
This module provides conveniences for working with records at
@@ -51,7 +51,7 @@ defmodule Record do
## Examples
- iex> record = { User, "jose", 27 }
+ iex> record = {User, "jose", 27}
iex> Record.record?(record, User)
true
@@ -79,7 +79,7 @@ defmodule Record do
## Examples
- iex> record = { User, "jose", 27 }
+ iex> record = {User, "jose", 27}
iex> Record.record?(record)
true
iex> tuple = {}
@@ -135,14 +135,14 @@ defmodule Record do
arities will be defined to manipulate the underlying record:
# To create records
- user() #=> { :user, "José", 25 }
- user(age: 26) #=> { :user, "José", 26 }
+ user() #=> {:user, "José", 25}
+ user(age: 26) #=> {:user, "José", 26}
# To get a field from the record
user(record, :name) #=> "José"
# To update the record
- user(record, age: 26) #=> { :user, "José", 26 }
+ user(record, age: 26) #=> {:user, "José", 26}
By default, Elixir uses the record name as the first element of
the tuple (the tag). But it can be changed to something else:
@@ -151,7 +151,7 @@ defmodule Record do
Record.defrecord :user, User, name: nil
end
- user() #=> { User, nil }
+ user() #=> {User, nil}
"""
defmacro defrecord(name, tag \\ nil, kv) do
@@ -161,7 +161,7 @@ defmodule Record do
tag = tag || name
kv = Macro.expand(kv, __ENV__)
- { fields, _types } = Record.Backend.split_fields_and_types(:defrecord, kv)
+ {fields, _types} = Record.Backend.split_fields_and_types(:defrecord, kv)
fields = Macro.escape(fields)
defmacro(unquote(name)(args \\ [])) do
@@ -184,7 +184,7 @@ defmodule Record do
tag = tag || name
kv = Macro.expand(kv, __ENV__)
- { fields, _types } = Record.Backend.split_fields_and_types(:defrecordp, kv)
+ {fields, _types} = Record.Backend.split_fields_and_types(:defrecordp, kv)
fields = Macro.escape(fields)
defmacrop(unquote(name)(args \\ [])) do
diff --git a/lib/elixir/lib/record/backend.ex b/lib/elixir/lib/record/backend.ex
index 953fa2117..f058141d2 100644
--- a/lib/elixir/lib/record/backend.ex
+++ b/lib/elixir/lib/record/backend.ex
@@ -15,16 +15,16 @@ defmodule Record.Backend do
raise ArgumentError, message: "#{tag} fields must be a keyword list, got: #{Macro.to_string other}"
end
- defp split_fields_and_types(tag, [{ field, { :::, _, [default, type] }}|t], fields, types) do
- split_fields_and_types(tag, t, [{ field, default }|fields], [{ field, type }|types])
+ defp split_fields_and_types(tag, [{field, {:::, _, [default, type]}}|t], fields, types) do
+ split_fields_and_types(tag, t, [{field, default}|fields], [{field, type}|types])
end
- defp split_fields_and_types(tag, [{ field, default }|t], fields, types) when is_atom(field) do
- split_fields_and_types(tag, t, [{ field, default }|fields], [{ field, quote(do: term) }|types])
+ defp split_fields_and_types(tag, [{field, default}|t], fields, types) when is_atom(field) do
+ split_fields_and_types(tag, t, [{field, default}|fields], [{field, quote(do: term)}|types])
end
defp split_fields_and_types(tag, [field|t], fields, types) when is_atom(field) do
- split_fields_and_types(tag, t, [{ field, nil }|fields], [{ field, quote(do: term) }|types])
+ split_fields_and_types(tag, t, [{field, nil}|fields], [{field, quote(do: term)}|types])
end
defp split_fields_and_types(tag, [other|_], _fields, _types) do
@@ -32,7 +32,7 @@ defmodule Record.Backend do
end
defp split_fields_and_types(_tag, [], fields, types) do
- { :lists.reverse(fields), :lists.reverse(types) }
+ {:lists.reverse(fields), :lists.reverse(types)}
end
@doc """
@@ -82,26 +82,26 @@ defmodule Record.Backend do
def create(atom, fields, keyword, caller) do
in_match = caller.in_match?
- { match, remaining } =
- Enum.map_reduce(fields, keyword, fn({ field, default }, each_keyword) ->
+ {match, remaining} =
+ Enum.map_reduce(fields, keyword, fn({field, default}, each_keyword) ->
new_fields =
case Keyword.has_key?(each_keyword, field) do
true -> Keyword.get(each_keyword, field)
false ->
case in_match do
- true -> { :_, [], nil }
+ true -> {:_, [], nil}
false -> Macro.escape(default)
end
end
- { new_fields, Keyword.delete(each_keyword, field) }
+ {new_fields, Keyword.delete(each_keyword, field)}
end)
case remaining do
[] ->
- { :{}, [], [atom|match] }
+ {:{}, [], [atom|match]}
_ ->
- keys = for { key, _ } <- remaining, do: key
+ keys = for {key, _} <- remaining, do: key
raise ArgumentError, message: "record #{inspect atom} does not have the key: #{inspect hd(keys)}"
end
end
@@ -114,7 +114,7 @@ defmodule Record.Backend do
raise ArgumentError, message: "cannot invoke update style macro inside match"
end
- Enum.reduce keyword, var, fn({ key, value }, acc) ->
+ Enum.reduce keyword, var, fn({key, value}, acc) ->
index = find_index(fields, key, 0)
if index do
quote do
@@ -140,7 +140,7 @@ defmodule Record.Backend do
end
end
- defp find_index([{ k, _ }|_], k, i), do: i + 2
- defp find_index([{ _, _ }|t], k, i), do: find_index(t, k, i + 1)
+ defp find_index([{k, _}|_], k, i), do: i + 2
+ defp find_index([{_, _}|t], k, i), do: find_index(t, k, i + 1)
defp find_index([], _k, _i), do: nil
end
diff --git a/lib/elixir/lib/record/deprecated.ex b/lib/elixir/lib/record/deprecated.ex
index 85a730fb1..24a4ebd5d 100644
--- a/lib/elixir/lib/record/deprecated.ex
+++ b/lib/elixir/lib/record/deprecated.ex
@@ -22,7 +22,7 @@ defmodule Record.Deprecated do
end
end
- defp record_check!([{ field, { :::, _, [_, _] }}|_]) when is_atom(field) do
+ defp record_check!([{field, {:::, _, [_, _]}}|_]) when is_atom(field) do
raise ArgumentError, message: "typespecs are not supported inlined with defrecord, " <>
"please use record_type instead"
end
@@ -32,7 +32,7 @@ defmodule Record.Deprecated do
def defrecordp(name, tag, fields) do
case recordp_split(fields, [], [], false) do
- { :ok, fields, types, def_type } ->
+ {:ok, fields, types, def_type} ->
types = Macro.escape(types)
# bind_quoted isn't available when bootstrapping record
@@ -41,7 +41,7 @@ defmodule Record.Deprecated do
if def_type do
type = binary_to_atom(atom_to_binary(name) <> "_t")
- @typep unquote(type)() :: { unquote(tag || name), unquote_splicing(types) }
+ @typep unquote(type)() :: {unquote(tag || name), unquote_splicing(types)}
end
end
@@ -62,8 +62,8 @@ defmodule Record.Deprecated do
end
end
- defp recordp_split([{ field, { :::, _, [default, type] }}|t], defaults, types, _) do
- recordp_split(t, [{ field, default }|defaults], [type|types], true)
+ defp recordp_split([{field, {:::, _, [default, type]}}|t], defaults, types, _) do
+ recordp_split(t, [{field, default}|defaults], [type|types], true)
end
defp recordp_split([other|t], defaults, types, def_type) do
@@ -71,7 +71,7 @@ defmodule Record.Deprecated do
end
defp recordp_split([], defaults, types, def_type) do
- { :ok, :lists.reverse(defaults), :lists.reverse(types), def_type }
+ {:ok, :lists.reverse(defaults), :lists.reverse(types), def_type}
end
defp recordp_split(_, _, _, _) do
@@ -105,8 +105,8 @@ defmodule Record.Deprecated do
def deftypes(values, types, env) do
types = types || []
values = for value <- values do
- { name, default } = convert_value(value)
- { name, default, find_spec(types, name) }
+ {name, default} = convert_value(value)
+ {name, default, find_spec(types, name)}
end
contents = [
@@ -116,7 +116,7 @@ defmodule Record.Deprecated do
# We need to handle bootstraping
cond do
- :code.ensure_loaded(Kernel.Typespec) != { :module, Kernel.Typespec } ->
+ :code.ensure_loaded(Kernel.Typespec) != {:module, Kernel.Typespec} ->
nil
env == Macro.Env ->
Module.eval_quoted(env, contents, [], [])
@@ -129,8 +129,8 @@ defmodule Record.Deprecated do
when is_atom(name) and is_list(values) and is_atom(tag) do
escaped = for value <- values do
- { key, value } = convert_value(value)
- { key, Macro.escape(value) }
+ {key, value} = convert_value(value)
+ {key, Macro.escape(value)}
end
tag = tag || name
@@ -161,7 +161,7 @@ defmodule Record.Deprecated do
end
def __on_definition__(env, kind, name, args, _guards, _body) do
- tuple = { name, length(args) }
+ tuple = {name, length(args)}
module = env.module
functions = Module.get_attribute(module, :record_optimizable)
@@ -191,7 +191,7 @@ defmodule Record.Deprecated do
in_match = caller.in_match?
has_underscore_value = Keyword.has_key?(keyword, :_)
- underscore_value = Keyword.get(keyword, :_, { :_, [], nil })
+ underscore_value = Keyword.get(keyword, :_, {:_, [], nil})
keyword = Keyword.delete keyword, :_
iterator = fn({field, default}, each_keyword) ->
@@ -205,16 +205,16 @@ defmodule Record.Deprecated do
end
end
- { new_fields, Keyword.delete(each_keyword, field) }
+ {new_fields, Keyword.delete(each_keyword, field)}
end
- { match, remaining } = :lists.mapfoldl(iterator, keyword, fields)
+ {match, remaining} = :lists.mapfoldl(iterator, keyword, fields)
case remaining do
[] ->
- { :{}, [], [atom|match] }
+ {:{}, [], [atom|match]}
_ ->
- keys = for { key, _ } <- remaining, do: key
+ keys = for {key, _} <- remaining, do: key
raise ArgumentError, message: "record #{inspect atom} does not have the key: #{inspect hd(keys)}"
end
end
@@ -239,7 +239,7 @@ defmodule Record.Deprecated do
raise ArgumentError, message: "cannot invoke update style macro inside match context"
end
- Enum.reduce keyword, var, fn({ key, value }, acc) ->
+ Enum.reduce keyword, var, fn({key, value}, acc) ->
index = find_index(fields, key, 0)
if index do
quote do
@@ -265,7 +265,7 @@ defmodule Record.Deprecated do
## Function generation
defp reflection(values) do
- quoted = for { k, _ } <- values do
+ quoted = for {k, _} <- values do
index = find_index(values, k, 0)
quote do
def __record__(:index, unquote(k)), do: unquote(index + 1)
@@ -290,23 +290,23 @@ defmodule Record.Deprecated do
end
defp initializer(values) do
- defaults = for { _, value } <- values, do: value
+ defaults = for {_, value} <- values, do: value
# For each value, define a piece of code that will receive
# an ordered dict of options (opts) and it will try to fetch
# the given key from the ordered dict, falling back to the
# default value if one does not exist.
- atom_selective = for { k, v } <- values, do: initialize_lookup(k, v)
- string_selective = for { k, v } <- values, do: initialize_lookup(atom_to_binary(k), v)
+ atom_selective = for {k, v} <- values, do: initialize_lookup(k, v)
+ string_selective = for {k, v} <- values, do: initialize_lookup(atom_to_binary(k), v)
quote do
@doc false
def new(), do: new([])
@doc false
- def new([]), do: { __MODULE__, unquote_splicing(defaults) }
- def new([{key, _}|_] = opts) when is_atom(key), do: { __MODULE__, unquote_splicing(atom_selective) }
- def new([{key, _}|_] = opts) when is_binary(key), do: { __MODULE__, unquote_splicing(string_selective) }
+ def new([]), do: {__MODULE__, unquote_splicing(defaults)}
+ def new([{key, _}|_] = opts) when is_atom(key), do: {__MODULE__, unquote_splicing(atom_selective)}
+ def new([{key, _}|_] = opts) when is_binary(key), do: {__MODULE__, unquote_splicing(string_selective)}
end
end
@@ -320,9 +320,9 @@ defmodule Record.Deprecated do
end
defp conversions(values) do
- sorted = for { k, _ } <- values do
+ sorted = for {k, _} <- values do
index = find_index(values, k, 0)
- { k, quote(do: :erlang.element(unquote(index + 2), record)) }
+ {k, quote(do: :erlang.element(unquote(index + 2), record))}
end
quote do
@@ -333,11 +333,11 @@ defmodule Record.Deprecated do
end
end
- defp accessors([{ :__exception__, _ }|t], 1) do
+ defp accessors([{:__exception__, _}|t], 1) do
accessors(t, 2)
end
- defp accessors([{ key, _default }|t], i) do
+ defp accessors([{key, _default}|t], i) do
update = binary_to_atom "update_" <> atom_to_binary(key)
contents = quote do
@@ -374,8 +374,8 @@ defmodule Record.Deprecated do
string_fields =
for {key, _default} <- values, do: updater_lookup(atom_to_binary(key), key, values)
- atom_contents = quote do: { __MODULE__, unquote_splicing(atom_fields) }
- string_contents = quote do: { __MODULE__, unquote_splicing(string_fields) }
+ atom_contents = quote do: {__MODULE__, unquote_splicing(atom_fields)}
+ string_contents = quote do: {__MODULE__, unquote_splicing(string_fields)}
quote do
@doc false
@@ -407,8 +407,8 @@ defmodule Record.Deprecated do
quote do
@record_optimized true
@record_optimizable []
- @before_compile { unquote(__MODULE__), :__before_compile__ }
- @on_definition { unquote(__MODULE__), :__on_definition__ }
+ @before_compile {unquote(__MODULE__), :__before_compile__}
+ @on_definition {unquote(__MODULE__), :__on_definition__}
end
end
@@ -419,12 +419,12 @@ defmodule Record.Deprecated do
## Types/specs generation
defp core_specs(values) do
- types = for { _, _, spec } <- values, do: spec
- options = for { k, _, v } <- values, do: { k, v }
+ types = for {_, _, spec} <- values, do: spec
+ options = for {k, _, v} <- values, do: {k, v}
quote do
unless Kernel.Typespec.defines_type?(__MODULE__, :t, 0) do
- @type t :: { __MODULE__, unquote_splicing(types) }
+ @type t :: {__MODULE__, unquote_splicing(types)}
end
unless Kernel.Typespec.defines_type?(__MODULE__, :options, 0) do
@@ -441,11 +441,11 @@ defmodule Record.Deprecated do
end
end
- defp accessor_specs([{ :__exception__, _, _ }|t], 1, acc) do
+ defp accessor_specs([{:__exception__, _, _}|t], 1, acc) do
accessor_specs(t, 2, acc)
end
- defp accessor_specs([{ key, _default, spec }|t], i, acc) do
+ defp accessor_specs([{key, _default, spec}|t], i, acc) do
update = binary_to_atom "update_" <> atom_to_binary(key)
contents = quote do
@@ -464,15 +464,15 @@ defmodule Record.Deprecated do
defp is_keyword(list) when is_list(list), do: :lists.all(&is_keyword_tuple/1, list)
defp is_keyword(_), do: false
- defp is_keyword_tuple({ x, _ }) when is_atom(x), do: true
+ defp is_keyword_tuple({x, _}) when is_atom(x), do: true
defp is_keyword_tuple(_), do: false
- defp convert_value(atom) when is_atom(atom), do: { atom, nil }
+ defp convert_value(atom) when is_atom(atom), do: {atom, nil}
- defp convert_value({ atom, other }) when is_atom(atom), do:
- { atom, check_value(atom, other) }
+ defp convert_value({atom, other}) when is_atom(atom), do:
+ {atom, check_value(atom, other)}
- defp convert_value({ field, _ }), do:
+ defp convert_value({field, _}), do:
raise(ArgumentError, message: "record field name has to be an atom, got #{inspect field}")
defp check_value(atom, other) when is_list(other) do
@@ -486,8 +486,8 @@ defmodule Record.Deprecated do
end
defp check_value(atom, other) when is_function(other) do
- unless :erlang.fun_info(other, :env) == { :env, [] } and
- :erlang.fun_info(other, :type) == { :type, :external } do
+ unless :erlang.fun_info(other, :env) == {:env, []} and
+ :erlang.fun_info(other, :type) == {:type, :external} do
raise ArgumentError, message: "record field default value #{inspect atom} can only contain " <>
"functions that point to an existing &Mod.fun/arity"
end
@@ -500,12 +500,12 @@ defmodule Record.Deprecated do
defp check_value(_atom, other), do: other
- defp find_index([{ k, _ }|_], k, i), do: i
- defp find_index([{ _, _ }|t], k, i), do: find_index(t, k, i + 1)
+ defp find_index([{k, _}|_], k, i), do: i
+ defp find_index([{_, _}|t], k, i), do: find_index(t, k, i + 1)
defp find_index([], _k, _i), do: nil
defp find_spec(types, name) do
- matches = for { k, v } <- types, name == k, do: v
+ matches = for {k, v} <- types, name == k, do: v
case matches do
[h|_] -> h
_ -> quote do: term
@@ -517,7 +517,7 @@ defmodule Record.DSL do
@moduledoc false
defmacro record_type(opts) when is_list(opts) do
- escaped = for { k, v } <- opts, do: { k, Macro.escape(v) }
+ escaped = for {k, v} <- opts, do: {k, Macro.escape(v)}
quote do
@record_types Keyword.merge(@record_types || [], unquote(escaped))
diff --git a/lib/elixir/lib/record/extractor.ex b/lib/elixir/lib/record/extractor.ex
index 0a89e08eb..505d6b037 100644
--- a/lib/elixir/lib/record/extractor.ex
+++ b/lib/elixir/lib/record/extractor.ex
@@ -21,7 +21,7 @@ defmodule Record.Extractor do
[app|path] = :filename.split(List.from_char_data!(file))
case :code.lib_dir(list_to_atom(app)) do
- { :error, _ } ->
+ {:error, _} ->
raise ArgumentError, message: "lib file #{file} could not be found"
libpath ->
extract_record name, :filename.join([libpath|path])
@@ -41,7 +41,7 @@ defmodule Record.Extractor do
# Parse the given file and extract all existent records.
defp extract_records(form) do
- for { :attribute, _, :record, record } <- form, do: record
+ for {:attribute, _, :record, record} <- form, do: record
end
# Read a file and return its abstract syntax form that also
@@ -49,7 +49,7 @@ defmodule Record.Extractor do
# by using Erlang's epp_dodger.
defp read_file(file) do
case :epp_dodger.quick_parse_file(file) do
- { :ok, form } ->
+ {:ok, form} ->
form
other ->
raise "error parsing file #{file}, got: #{inspect(other)}"
@@ -59,34 +59,34 @@ defmodule Record.Extractor do
# Parse a tuple with name and fields and returns a
# list of tuples where the first element is the field
# and the second is its default value.
- defp parse_record({ _name, fields }, form) do
- cons = List.foldr fields, { nil, 0 }, fn f, acc ->
- { :cons, 0, parse_field(f), acc }
+ defp parse_record({_name, fields}, form) do
+ cons = List.foldr fields, {nil, 0}, fn f, acc ->
+ {:cons, 0, parse_field(f), acc}
end
eval_record(cons, form)
end
- defp parse_field({ :typed_record_field, record_field, _type }) do
+ defp parse_field({:typed_record_field, record_field, _type}) do
parse_field(record_field)
end
- defp parse_field({ :record_field, _, key }) do
- { :tuple, 0, [key, {:atom, 0, :undefined}] }
+ defp parse_field({:record_field, _, key}) do
+ {:tuple, 0, [key, {:atom, 0, :undefined}]}
end
- defp parse_field({ :record_field, _, key, value }) do
- { :tuple, 0, [key, value] }
+ defp parse_field({:record_field, _, key, value}) do
+ {:tuple, 0, [key, value]}
end
defp eval_record(cons, form) do
form = form ++
- [ { :function, 0, :hello, 0, [
- { :clause, 0, [], [], [ cons ] } ] } ]
+ [ {:function, 0, :hello, 0, [
+ {:clause, 0, [], [], [ cons ]} ]} ]
- { :function, 0, :hello, 0, [
- { :clause, 0, [], [], [ record_ast ] } ] } = :erl_expand_records.module(form, []) |> List.last
+ {:function, 0, :hello, 0, [
+ {:clause, 0, [], [], [ record_ast ]} ]} = :erl_expand_records.module(form, []) |> List.last
- { :value, record, _ } = :erl_eval.expr(record_ast, [])
+ {:value, record, _} = :erl_eval.expr(record_ast, [])
record
end
end
diff --git a/lib/elixir/lib/regex.ex b/lib/elixir/lib/regex.ex
index 2d56c5fb1..753075c71 100644
--- a/lib/elixir/lib/regex.ex
+++ b/lib/elixir/lib/regex.ex
@@ -63,7 +63,7 @@ defmodule Regex do
"""
defrecordp :regex, Regex, [:re_pattern, :source, :options]
- @type t :: { Regex, term, binary, binary, [atom] | nil }
+ @type t :: {Regex, term, binary, binary, [atom] | nil}
defexception CompileError, message: "regex could not be compiled"
@@ -74,8 +74,8 @@ defmodule Regex do
representing the same regex options given to the `~r` sigil,
or a list of options, as expected by the [Erlang `re` docs](http://www.erlang.org/doc/man/re.html).
- It returns `{ :ok, regex }` in case of success,
- `{ :error, reason }` otherwise.
+ It returns `{:ok, regex}` in case of success,
+ `{:error, reason}` otherwise.
## Examples
@@ -86,13 +86,13 @@ defmodule Regex do
{:error, {'nothing to repeat', 0}}
"""
- @spec compile(binary, binary | [term]) :: { :ok, t } | { :error, any }
+ @spec compile(binary, binary | [term]) :: {:ok, t} | {:error, any}
def compile(source, options \\ "")
def compile(source, options) when is_binary(options) do
case translate_options(options) do
- { :error, rest } ->
- { :error, { :invalid_option, rest } }
+ {:error, rest} ->
+ {:error, {:invalid_option, rest}}
translated_options ->
compile(source, translated_options, options)
@@ -105,8 +105,8 @@ defmodule Regex do
defp compile(source, opts, doc_opts) when is_binary(source) do
case :re.compile(source, opts) do
- { :ok, re_pattern } ->
- { :ok, regex(re_pattern: re_pattern, source: source, options: doc_opts) }
+ {:ok, re_pattern} ->
+ {:ok, regex(re_pattern: re_pattern, source: source, options: doc_opts)}
error ->
error
end
@@ -118,8 +118,8 @@ defmodule Regex do
"""
def compile!(source, options \\ "") do
case compile(source, options) do
- { :ok, regex } -> regex
- { :error, { reason, at } } -> raise Regex.CompileError, message: "#{reason} at position #{at}"
+ {:ok, regex} -> regex
+ {:error, {reason, at}} -> raise Regex.CompileError, message: "#{reason} at position #{at}"
end
end
@@ -136,7 +136,7 @@ defmodule Regex do
"""
def match?(regex(re_pattern: compiled), string) when is_binary(string) do
- :re.run(string, compiled, [{ :capture, :none }]) == :match
+ :re.run(string, compiled, [{:capture, :none}]) == :match
end
@doc """
@@ -189,10 +189,10 @@ defmodule Regex do
others -> others
end
- case :re.run(string, compiled, [{ :capture, captures, return }]) do
+ case :re.run(string, compiled, [{:capture, captures, return}]) do
:nomatch -> nil
:match -> []
- { :match, results } -> results
+ {:match, results} -> results
end
end
@@ -204,10 +204,10 @@ defmodule Regex do
## Examples
iex> Regex.named_captures(~r/c(?<foo>d)/, "abcd")
- %{ "foo" => "d" }
+ %{"foo" => "d"}
iex> Regex.named_captures(~r/a(?<foo>b)c(?<bar>d)/, "abcd")
- %{ "bar" => "d", "foo" => "b" }
+ %{"bar" => "d", "foo" => "b"}
iex> Regex.named_captures(~r/a(?<foo>b)c(?<bar>d)/, "efgh")
nil
@@ -263,7 +263,7 @@ defmodule Regex do
"""
def names(regex(re_pattern: re_pattern)) do
- { :namelist, names } = :re.inspect(re_pattern, :namelist)
+ {:namelist, names} = :re.inspect(re_pattern, :namelist)
names
end
@@ -309,11 +309,11 @@ defmodule Regex do
others -> others
end
- options = [{ :capture, captures, return }, :global]
+ options = [{:capture, captures, return}, :global]
case :re.run(string, compiled, options) do
:match -> []
:nomatch -> []
- { :match, results } -> results
+ {:match, results} -> results
end
end
@@ -400,11 +400,11 @@ defmodule Regex do
def replace(regex(re_pattern: compiled), string, replacement, options) when is_binary(string) do
opts = if Keyword.get(options, :global) != false, do: [:global], else: []
- opts = [{ :return, :binary }|opts]
+ opts = [{:return, :binary}|opts]
:re.replace(string, compiled, replacement, opts)
end
- { :ok, pattern } = :re.compile(~S"[.^$*+?()[{\\\|\s#]", [:unicode])
+ {:ok, pattern} = :re.compile(~S"[.^$*+?()[{\\\|\s#]", [:unicode])
@escape_pattern pattern
@doc ~S"""
@@ -421,7 +421,7 @@ defmodule Regex do
"""
@spec escape(String.t) :: String.t
def escape(string) when is_binary(string) do
- :re.replace(string, @escape_pattern, "\\\\&", [:global, { :return, :binary }])
+ :re.replace(string, @escape_pattern, "\\\\&", [:global, {:return, :binary}])
end
# Helpers
@@ -451,5 +451,5 @@ defmodule Regex do
defp translate_options(<<?s, t :: binary>>), do: [:dotall, {:newline, :anycrlf}|translate_options(t)]
defp translate_options(<<?m, t :: binary>>), do: [:multiline|translate_options(t)]
defp translate_options(<<>>), do: []
- defp translate_options(rest), do: { :error, rest }
+ defp translate_options(rest), do: {:error, rest}
end
diff --git a/lib/elixir/lib/set.ex b/lib/elixir/lib/set.ex
index 5dfee8bd7..c0140ea73 100644
--- a/lib/elixir/lib/set.ex
+++ b/lib/elixir/lib/set.ex
@@ -98,8 +98,8 @@ defmodule Set do
if target1 == target2 do
target1.difference(set1, set2)
else
- target2.reduce(set2, { :cont, set1 }, fn v, acc ->
- { :cont, target1.delete(acc, v) }
+ target2.reduce(set2, {:cont, set1}, fn v, acc ->
+ {:cont, target1.delete(acc, v)}
end) |> elem(1)
end
end
@@ -128,10 +128,10 @@ defmodule Set do
if target1 == target2 do
target1.disjoint?(set1, set2)
else
- target2.reduce(set2, { :cont, true }, fn member, acc ->
+ target2.reduce(set2, {:cont, true}, fn member, acc ->
case target1.member?(set1, member) do
- false -> { :cont, acc }
- _ -> { :halt, false }
+ false -> {:cont, acc}
+ _ -> {:halt, false}
end
end) |> elem(1)
end
@@ -200,8 +200,8 @@ defmodule Set do
if target1 == target2 do
target1.intersection(set1, set2)
else
- target1.reduce(set1, { :cont, Collectable.empty(set1) }, fn v, acc ->
- { :cont, if(target2.member?(set2, v), do: target1.put(acc, v), else: acc) }
+ target1.reduce(set1, {:cont, Collectable.empty(set1)}, fn v, acc ->
+ {:cont, if(target2.member?(set2, v), do: target1.put(acc, v), else: acc)}
end) |> elem(1)
end
end
@@ -317,17 +317,17 @@ defmodule Set do
if target1 == target2 do
target1.union(set1, set2)
else
- target2.reduce(set2, { :cont, set1 }, fn v, acc ->
- { :cont, target1.put(acc, v) }
+ target2.reduce(set2, {:cont, set1}, fn v, acc ->
+ {:cont, target1.put(acc, v)}
end) |> elem(1)
end
end
defp do_subset?(target1, target2, set1, set2) do
- target1.reduce(set1, { :cont, true }, fn member, acc ->
+ target1.reduce(set1, {:cont, true}, fn member, acc ->
case target2.member?(set2, member) do
- true -> { :cont, acc }
- _ -> { :halt, false }
+ true -> {:cont, acc}
+ _ -> {:halt, false}
end
end) |> elem(1)
end
diff --git a/lib/elixir/lib/stream.ex b/lib/elixir/lib/stream.ex
index 206d1be12..89ab98807 100644
--- a/lib/elixir/lib/stream.ex
+++ b/lib/elixir/lib/stream.ex
@@ -96,40 +96,40 @@ defmodule Stream do
def reduce(lazy, acc, fun) do
do_reduce(lazy, acc, fn x, [acc] ->
- { reason, acc } = fun.(x, acc)
- { reason, [acc] }
+ {reason, acc} = fun.(x, acc)
+ {reason, [acc]}
end)
end
def count(_lazy) do
- { :error, __MODULE__ }
+ {:error, __MODULE__}
end
def member?(_lazy, _value) do
- { :error, __MODULE__ }
+ {:error, __MODULE__}
end
defp do_reduce(Lazy[enum: enum, funs: funs, accs: accs, done: done], acc, fun) do
composed = :lists.foldl(fn fun, acc -> fun.(acc) end, fun, funs)
do_each(&Enumerable.reduce(enum, &1, composed),
- done && { done, fun }, :lists.reverse(accs), acc)
+ done && {done, fun}, :lists.reverse(accs), acc)
end
- defp do_each(reduce, done, accs, { command, acc }) do
- case reduce.({ command, [acc|accs] }) do
- { :suspended, [acc|accs], continuation } ->
- { :suspended, acc, &do_each(continuation, done, accs, &1) }
- { :halted, [acc|_] } ->
- { :halted, acc }
- { :done, [acc|_] = accs } ->
+ defp do_each(reduce, done, accs, {command, acc}) do
+ case reduce.({command, [acc|accs]}) do
+ {:suspended, [acc|accs], continuation} ->
+ {:suspended, acc, &do_each(continuation, done, accs, &1)}
+ {:halted, [acc|_]} ->
+ {:halted, acc}
+ {:done, [acc|_] = accs} ->
case done do
nil ->
- { :done, acc }
- { done, fun } ->
+ {:done, acc}
+ {done, fun} ->
case done.(fun).(accs) do
- { :cont, [acc|_] } -> { :done, acc }
- { :halt, [acc|_] } -> { :halted, acc }
- { :suspend, [acc|_] } -> { :suspended, acc, &({ :done, elem(&1, 1) }) }
+ {:cont, [acc|_]} -> {:done, acc}
+ {:halt, [acc|_]} -> {:halted, acc}
+ {:suspend, [acc|_]} -> {:suspended, acc, &({:done, elem(&1, 1)})}
end
end
end
@@ -154,8 +154,8 @@ defmodule Stream do
defmacrop cont_with_acc(f, entry, h, n, t) do
quote do
- { reason, [h|t] } = unquote(f).(unquote(entry), [unquote(h)|unquote(t)])
- { reason, [h,unquote(n)|t] }
+ {reason, [h|t]} = unquote(f).(unquote(entry), [unquote(h)|unquote(t)])
+ {reason, [h,unquote(n)|t]}
end
end
@@ -199,14 +199,14 @@ defmodule Stream do
@spec chunk(Enumerable.t, non_neg_integer, non_neg_integer, Enumerable.t | nil) :: Enumerable.t
def chunk(enum, n, step, pad \\ nil) when n > 0 and step > 0 do
limit = :erlang.max(n, step)
- lazy enum, { [], 0 },
+ lazy enum, {[], 0},
fn(f1) -> R.chunk(n, step, limit, f1) end,
fn(f1) -> &do_chunk(&1, n, pad, f1) end
end
- defp do_chunk(acc(h, { buffer, count } = old, t) = acc, n, pad, f1) do
+ defp do_chunk(acc(h, {buffer, count} = old, t) = acc, n, pad, f1) do
if nil?(pad) || count == 0 do
- { :cont, acc }
+ {:cont, acc}
else
buffer = :lists.reverse(buffer) ++ Enum.take(pad, n - count)
cont_with_acc(f1, buffer, h, old, t)
@@ -233,10 +233,10 @@ defmodule Stream do
end
defp do_chunk_by(acc(_, nil, _) = acc, _f1) do
- { :cont, acc }
+ {:cont, acc}
end
- defp do_chunk_by(acc(h, { buffer, _ }, t), f1) do
+ defp do_chunk_by(acc(h, {buffer, _}, t), f1) do
cont_with_acc(f1, :lists.reverse(buffer), h, nil, t)
end
@@ -267,12 +267,12 @@ defmodule Stream do
def drop(enum, n) when n < 0 do
n = abs(n)
- lazy enum, { 0, [], [] }, fn(f1) ->
+ lazy enum, {0, [], []}, fn(f1) ->
fn
- entry, [h, { count, buf1, [] } | t] ->
+ entry, [h, {count, buf1, []} | t] ->
do_drop(:cont, n, entry, h, count, buf1, [], t)
- entry, [h, { count, buf1, [next|buf2] } | t] ->
- { reason, [h|t] } = f1.(next, [h|t])
+ entry, [h, {count, buf1, [next|buf2]} | t] ->
+ {reason, [h|t]} = f1.(next, [h|t])
do_drop(reason, n, entry, h, count, buf1, buf2, t)
end
end
@@ -282,9 +282,9 @@ defmodule Stream do
buf1 = [entry|buf1]
count = count + 1
if count == n do
- { reason, [h, { 0, [], :lists.reverse(buf1) }|t] }
+ {reason, [h, {0, [], :lists.reverse(buf1)}|t]}
else
- { reason, [h, { count, buf1, buf2 }|t] }
+ {reason, [h, {count, buf1, buf2}|t]}
end
end
@@ -344,7 +344,7 @@ defmodule Stream do
"""
@spec flat_map(Enumerable.t, (element -> Enumerable.t)) :: Enumerable.t
def flat_map(enum, mapper) do
- transform(enum, nil, fn val, nil -> { mapper.(val), nil } end)
+ transform(enum, nil, fn val, nil -> {mapper.(val), nil} end)
end
@doc """
@@ -393,29 +393,29 @@ defmodule Stream do
end
defp do_into(enum, collectable, transform, acc, fun) do
- { initial, into } = Collectable.into(collectable)
+ {initial, into} = Collectable.into(collectable)
composed = fn x, [acc|collectable] ->
- collectable = into.(collectable, { :cont, transform.(x) })
- { reason, acc } = fun.(x, acc)
- { reason, [acc|collectable] }
+ collectable = into.(collectable, {:cont, transform.(x)})
+ {reason, acc} = fun.(x, acc)
+ {reason, [acc|collectable]}
end
do_into(&Enumerable.reduce(enum, &1, composed), initial, into, acc)
end
- defp do_into(reduce, collectable, into, { command, acc }) do
+ defp do_into(reduce, collectable, into, {command, acc}) do
try do
- reduce.({ command, [acc|collectable] })
+ reduce.({command, [acc|collectable]})
catch
kind, reason ->
stacktrace = System.stacktrace
into.(collectable, :halt)
:erlang.raise(kind, reason, stacktrace)
else
- { :suspended, [acc|collectable], continuation } ->
- { :suspended, acc, &do_into(continuation, collectable, into, &1) }
- { reason, [acc|collectable] } ->
+ {:suspended, [acc|collectable], continuation} ->
+ {:suspended, acc, &do_into(continuation, collectable, into, &1)}
+ {reason, [acc|collectable]} ->
into.(collectable, :done)
- { reason, acc }
+ {reason, acc}
end
end
@@ -471,7 +471,7 @@ defmodule Stream do
"""
@spec run(Enumerable.t) :: :ok
def run(stream) do
- Enumerable.reduce(stream, { :cont, nil }, fn(_, _) -> { :cont, nil } end)
+ Enumerable.reduce(stream, {:cont, nil}, fn(_, _) -> {:cont, nil} end)
:ok
end
@@ -546,15 +546,15 @@ defmodule Stream do
end
defp do_take(enum, n, acc, f) do
- { _, { _count, buf1, buf2 } } =
- Enumerable.reduce(enum, { :cont, { 0, [], [] } }, fn
- entry, { count, buf1, buf2 } ->
+ {_, {_count, buf1, buf2}} =
+ Enumerable.reduce(enum, {:cont, {0, [], []}}, fn
+ entry, {count, buf1, buf2} ->
buf1 = [entry|buf1]
count = count + 1
if count == n do
- { :cont, { 0, [], buf1 } }
+ {:cont, {0, [], buf1}}
else
- { :cont, { count, buf1, buf2 } }
+ {:cont, {count, buf1, buf2}}
end
end)
@@ -626,14 +626,14 @@ defmodule Stream do
iex> enum = 1..100
iex> n = 3
iex> stream = Stream.transform(enum, 0, fn i, acc ->
- ...> if acc < n, do: { [i], acc + 1 }, else: { :halt, acc }
+ ...> if acc < n, do: {[i], acc + 1}, else: {:halt, acc}
...> end)
iex> Enum.to_list(stream)
[1,2,3]
"""
@spec transform(Enumerable.t, acc, fun) :: Enumerable.t when
- fun: (element, acc -> { Enumerable.t, acc } | { :halt, acc }),
+ fun: (element, acc -> {Enumerable.t, acc} | {:halt, acc}),
acc: any
def transform(enum, acc, reducer) do
&do_transform(enum, acc, reducer, &1, &2)
@@ -647,27 +647,27 @@ defmodule Stream do
end
defp do_transform(user_acc, user, fun, next_acc, next, inner_acc, inner) do
- case next.({ :cont, next_acc }) do
- { :suspended, [val|next_acc], next } ->
+ case next.({:cont, next_acc}) do
+ {:suspended, [val|next_acc], next} ->
try do
user.(val, user_acc)
catch
kind, reason ->
- next.({ :halt, next_acc })
+ next.({:halt, next_acc})
:erlang.raise(kind, reason, :erlang.get_stacktrace)
else
- { [], user_acc } ->
+ {[], user_acc} ->
do_transform(user_acc, user, fun, next_acc, next, inner_acc, inner)
- { list, user_acc } when is_list(list) ->
+ {list, user_acc} when is_list(list) ->
do_list_transform(user_acc, user, fun, next_acc, next, inner_acc, inner, &Enumerable.List.reduce(list, &1, fun))
- { :halt, _user_acc } ->
- next.({ :halt, next_acc })
- { :halted, elem(inner_acc, 1) }
- { other, user_acc } ->
+ {:halt, _user_acc} ->
+ next.({:halt, next_acc})
+ {:halted, elem(inner_acc, 1)}
+ {other, user_acc} ->
do_other_transform(user_acc, user, fun, next_acc, next, inner_acc, inner, &Enumerable.reduce(other, &1, inner))
end
- { reason, _ } ->
- { reason, elem(inner_acc, 1) }
+ {reason, _} ->
+ {reason, elem(inner_acc, 1)}
end
end
@@ -676,16 +676,16 @@ defmodule Stream do
reduce.(inner_acc)
catch
kind, reason ->
- next.({ :halt, next_acc })
+ next.({:halt, next_acc})
:erlang.raise(kind, reason, :erlang.get_stacktrace)
else
- { :done, acc } ->
- do_transform(user_acc, user, fun, next_acc, next, { :cont, acc }, inner)
- { :halted, acc } ->
- next.({ :halt, next_acc })
- { :halted, acc }
- { :suspended, acc, c } ->
- { :suspended, acc, &do_list_transform(user_acc, user, fun, next_acc, next, &1, inner, c) }
+ {:done, acc} ->
+ do_transform(user_acc, user, fun, next_acc, next, {:cont, acc}, inner)
+ {:halted, acc} ->
+ next.({:halt, next_acc})
+ {:halted, acc}
+ {:suspended, acc, c} ->
+ {:suspended, acc, &do_list_transform(user_acc, user, fun, next_acc, next, &1, inner, c)}
end
end
@@ -693,29 +693,29 @@ defmodule Stream do
try do
reduce.(inner_acc)
catch
- { :stream_transform, h } ->
- next.({ :halt, next_acc })
- { :halted, h }
+ {:stream_transform, h} ->
+ next.({:halt, next_acc})
+ {:halted, h}
kind, reason ->
- next.({ :halt, next_acc })
+ next.({:halt, next_acc})
:erlang.raise(kind, reason, :erlang.get_stacktrace)
else
- { _, acc } ->
- do_transform(user_acc, user, fun, next_acc, next, { :cont, acc }, inner)
- { :suspended, acc, c } ->
- { :suspended, acc, &do_other_transform(user_acc, user, fun, next_acc, next, &1, inner, c) }
+ {_, acc} ->
+ do_transform(user_acc, user, fun, next_acc, next, {:cont, acc}, inner)
+ {:suspended, acc, c} ->
+ {:suspended, acc, &do_other_transform(user_acc, user, fun, next_acc, next, &1, inner, c)}
end
end
defp do_transform_each(x, acc, f) do
case f.(x, acc) do
- { :halt, h } -> throw({ :stream_transform, h })
- { _, _ } = o -> o
+ {:halt, h} -> throw({:stream_transform, h})
+ {_, _} = o -> o
end
end
defp do_transform_step(x, acc) do
- { :suspend, [x|acc] }
+ {:suspend, [x|acc]}
end
@doc """
@@ -815,19 +815,19 @@ defmodule Stream do
right_fun = &Enumerable.reduce(right, &1, step)
# Return a function as a lazy enumerator.
- &do_zip([{ left_fun, [] }, { right_fun, [] }], &1, &2)
+ &do_zip([{left_fun, []}, {right_fun, []}], &1, &2)
end
- defp do_zip(zips, { :halt, acc }, _fun) do
+ defp do_zip(zips, {:halt, acc}, _fun) do
do_zip_close(zips)
- { :halted, acc }
+ {:halted, acc}
end
- defp do_zip(zips, { :suspend, acc }, fun) do
- { :suspended, acc, &do_zip(zips, &1, fun) }
+ defp do_zip(zips, {:suspend, acc}, fun) do
+ {:suspended, acc, &do_zip(zips, &1, fun)}
end
- defp do_zip(zips, { :cont, acc }, callback) do
+ defp do_zip(zips, {:cont, acc}, callback) do
try do
do_zip(zips, acc, callback, [], [])
catch
@@ -835,36 +835,36 @@ defmodule Stream do
do_zip_close(zips)
:erlang.raise(kind, reason, :erlang.get_stacktrace)
else
- { :next, buffer, acc } ->
+ {:next, buffer, acc} ->
do_zip(buffer, acc, callback)
- { :done, _ } = o ->
+ {:done, _} = o ->
o
end
end
- defp do_zip([{ fun, fun_acc }|t], acc, callback, list, buffer) do
- case fun.({ :cont, fun_acc }) do
- { :suspended, [i|fun_acc], fun } ->
- do_zip(t, acc, callback, [i|list], [{ fun, fun_acc }|buffer])
- { _, _ } ->
+ defp do_zip([{fun, fun_acc}|t], acc, callback, list, buffer) do
+ case fun.({:cont, fun_acc}) do
+ {:suspended, [i|fun_acc], fun} ->
+ do_zip(t, acc, callback, [i|list], [{fun, fun_acc}|buffer])
+ {_, _} ->
do_zip_close(:lists.reverse(buffer) ++ t)
- { :done, acc }
+ {:done, acc}
end
end
defp do_zip([], acc, callback, list, buffer) do
zipped = list_to_tuple(:lists.reverse(list))
- { :next, :lists.reverse(buffer), callback.(zipped, acc) }
+ {:next, :lists.reverse(buffer), callback.(zipped, acc)}
end
defp do_zip_close([]), do: :ok
- defp do_zip_close([{ fun, acc }|t]) do
- fun.({ :halt, acc })
+ defp do_zip_close([{fun, acc}|t]) do
+ fun.({:halt, acc})
do_zip_close(t)
end
defp do_zip_step(x, acc) do
- { :suspend, [x|acc] }
+ {:suspend, [x|acc]}
end
## Sources
@@ -888,22 +888,22 @@ defmodule Stream do
end
end
- defp do_cycle(_reduce, _cycle, { :halt, acc }) do
- { :halted, acc }
+ defp do_cycle(_reduce, _cycle, {:halt, acc}) do
+ {:halted, acc}
end
- defp do_cycle(reduce, cycle, { :suspend, acc }) do
- { :suspended, acc, &do_cycle(reduce, cycle, &1) }
+ defp do_cycle(reduce, cycle, {:suspend, acc}) do
+ {:suspended, acc, &do_cycle(reduce, cycle, &1)}
end
defp do_cycle(reduce, cycle, acc) do
case reduce.(acc) do
- { :done, acc } ->
- do_cycle(cycle, cycle, { :cont, acc })
- { :halted, acc } ->
- { :halted, acc }
- { :suspended, acc, continuation } ->
- { :suspended, acc, &do_cycle(continuation, cycle, &1) }
+ {:done, acc} ->
+ do_cycle(cycle, cycle, {:cont, acc})
+ {:halted, acc} ->
+ {:halted, acc}
+ {:suspended, acc, continuation} ->
+ {:suspended, acc, &do_cycle(continuation, cycle, &1)}
end
end
@@ -919,12 +919,12 @@ defmodule Stream do
"""
@spec iterate(element, (element -> element)) :: Enumerable.t
def iterate(start_value, next_fun) do
- unfold({ :ok, start_value}, fn
- { :ok, value } ->
- { value, { :next, value } }
- { :next, value } ->
+ unfold({:ok, start_value}, fn
+ {:ok, value} ->
+ {value, {:next, value}}
+ {:next, value} ->
next = next_fun.(value)
- { next, { :next, next } }
+ {next, {:next, next}}
end)
end
@@ -942,15 +942,15 @@ defmodule Stream do
&do_repeatedly(generator_fun, &1, &2)
end
- defp do_repeatedly(generator_fun, { :suspend, acc }, fun) do
- { :suspended, acc, &do_repeatedly(generator_fun, &1, fun) }
+ defp do_repeatedly(generator_fun, {:suspend, acc}, fun) do
+ {:suspended, acc, &do_repeatedly(generator_fun, &1, fun)}
end
- defp do_repeatedly(_generator_fun, { :halt, acc }, _fun) do
- { :halted, acc }
+ defp do_repeatedly(_generator_fun, {:halt, acc}, _fun) do
+ {:halted, acc}
end
- defp do_repeatedly(generator_fun, { :cont, acc }, fun) do
+ defp do_repeatedly(generator_fun, {:cont, acc}, fun) do
do_repeatedly(generator_fun, fun.(generator_fun.(), acc), fun)
end
@@ -974,32 +974,32 @@ defmodule Stream do
Stream.resource(fn -> File.open!("sample") end,
fn file ->
case IO.read(file, :line) do
- data when is_binary(data) -> { data, file }
+ data when is_binary(data) -> {data, file}
_ -> nil
end
end,
fn file -> File.close(file) end)
"""
- @spec resource((() -> acc), (acc -> { element, acc } | nil), (acc -> term)) :: Enumerable.t
+ @spec resource((() -> acc), (acc -> {element, acc} | nil), (acc -> term)) :: Enumerable.t
def resource(start_fun, next_fun, after_fun) do
&do_resource(start_fun.(), next_fun, &1, &2, after_fun)
end
- defp do_resource(next_acc, next_fun, { :suspend, acc }, fun, after_fun) do
- { :suspended, acc, &do_resource(next_acc, next_fun, &1, fun, after_fun) }
+ defp do_resource(next_acc, next_fun, {:suspend, acc}, fun, after_fun) do
+ {:suspended, acc, &do_resource(next_acc, next_fun, &1, fun, after_fun)}
end
- defp do_resource(next_acc, _next_fun, { :halt, acc }, _fun, after_fun) do
+ defp do_resource(next_acc, _next_fun, {:halt, acc}, _fun, after_fun) do
after_fun.(next_acc)
- { :halted, acc }
+ {:halted, acc}
end
- defp do_resource(next_acc, next_fun, { :cont, acc }, fun, after_fun) do
+ defp do_resource(next_acc, next_fun, {:cont, acc}, fun, after_fun) do
try do
case next_fun.(next_acc) do
nil -> nil
- { v, next_acc } -> { fun.(v, acc), next_acc }
+ {v, next_acc} -> {fun.(v, acc), next_acc}
end
catch
kind, reason ->
@@ -1008,8 +1008,8 @@ defmodule Stream do
else
nil ->
after_fun.(next_acc)
- { :done, acc }
- { acc, next_acc } ->
+ {:done, acc}
+ {acc, next_acc} ->
do_resource(next_acc, next_fun, acc, fun, after_fun)
end
end
@@ -1027,29 +1027,29 @@ defmodule Stream do
[5, 4, 3, 2, 1]
"""
- @spec unfold(acc, (acc -> { element, acc } | nil)) :: Enumerable.t
+ @spec unfold(acc, (acc -> {element, acc} | nil)) :: Enumerable.t
def unfold(next_acc, next_fun) do
&do_unfold(next_acc, next_fun, &1, &2)
end
- defp do_unfold(next_acc, next_fun, { :suspend, acc }, fun) do
- { :suspended, acc, &do_unfold(next_acc, next_fun, &1, fun) }
+ defp do_unfold(next_acc, next_fun, {:suspend, acc}, fun) do
+ {:suspended, acc, &do_unfold(next_acc, next_fun, &1, fun)}
end
- defp do_unfold(_next_acc, _next_fun, { :halt, acc }, _fun) do
- { :halted, acc }
+ defp do_unfold(_next_acc, _next_fun, {:halt, acc}, _fun) do
+ {:halted, acc}
end
- defp do_unfold(next_acc, next_fun, { :cont, acc }, fun) do
+ defp do_unfold(next_acc, next_fun, {:cont, acc}, fun) do
case next_fun.(next_acc) do
- nil -> { :done, acc }
- { v, next_acc } -> do_unfold(next_acc, next_fun, fun.(v, acc), fun)
+ nil -> {:done, acc}
+ {v, next_acc} -> do_unfold(next_acc, next_fun, fun.(v, acc), fun)
end
end
## Helpers
- @compile { :inline, lazy: 2, lazy: 3, lazy: 4 }
+ @compile {:inline, lazy: 2, lazy: 3, lazy: 4}
defp lazy(Lazy[funs: funs] = lazy, fun),
do: lazy.funs([fun|funs])
diff --git a/lib/elixir/lib/stream/reducers.ex b/lib/elixir/lib/stream/reducers.ex
index 69b7b1d1d..18659600f 100644
--- a/lib/elixir/lib/stream/reducers.ex
+++ b/lib/elixir/lib/stream/reducers.ex
@@ -4,22 +4,22 @@ defmodule Stream.Reducers do
defmacro chunk(n, step, limit, f \\ nil) do
quote do
- fn entry, acc(h, { buffer, count }, t) ->
+ fn entry, acc(h, {buffer, count}, t) ->
buffer = [entry|buffer]
count = count + 1
new =
if count >= unquote(limit) do
left = count - unquote(step)
- { Enum.take(buffer, left), left }
+ {Enum.take(buffer, left), left}
else
- { buffer, count }
+ {buffer, count}
end
if count == unquote(n) do
cont_with_acc(unquote(f), :lists.reverse(buffer), h, new, t)
else
- { :cont, acc(h, new, t) }
+ {:cont, acc(h, new, t)}
end
end
end
@@ -28,15 +28,15 @@ defmodule Stream.Reducers do
defmacro chunk_by(callback, f \\ nil) do
quote do
fn
- entry, acc(h, { buffer, value }, t) ->
+ entry, acc(h, {buffer, value}, t) ->
new_value = unquote(callback).(entry)
if new_value == value do
- { :cont, acc(h, { [entry|buffer], value }, t) }
+ {:cont, acc(h, {[entry|buffer], value}, t)}
else
- cont_with_acc(unquote(f), :lists.reverse(buffer), h, { [entry], new_value }, t)
+ cont_with_acc(unquote(f), :lists.reverse(buffer), h, {[entry], new_value}, t)
end
entry, acc(h, nil, t) ->
- { :cont, acc(h, { [entry], unquote(callback).(entry) }, t) }
+ {:cont, acc(h, {[entry], unquote(callback).(entry)}, t)}
end
end
end
@@ -45,7 +45,7 @@ defmodule Stream.Reducers do
quote do
fn
_entry, acc(h, n, t) when n > 0 ->
- { :cont, acc(h, n-1, t) }
+ {:cont, acc(h, n-1, t)}
entry, acc(h, n, t) ->
cont_with_acc(unquote(f), entry, h, n, t)
end
@@ -56,7 +56,7 @@ defmodule Stream.Reducers do
quote do
fn entry, acc(h, bool, t) = orig ->
if bool and unquote(callback).(entry) do
- { :cont, orig }
+ {:cont, orig}
else
cont_with_acc(unquote(f), entry, h, false, t)
end
@@ -70,7 +70,7 @@ defmodule Stream.Reducers do
if unquote(callback).(entry) do
cont(unquote(f), entry, acc)
else
- { :cont, acc }
+ {:cont, acc}
end
end
end
@@ -82,7 +82,7 @@ defmodule Stream.Reducers do
if unquote(filter).(entry) do
cont(unquote(f), unquote(mapper).(entry), acc)
else
- { :cont, acc }
+ {:cont, acc}
end
end
end
@@ -102,7 +102,7 @@ defmodule Stream.Reducers do
unless unquote(callback).(entry) do
cont(unquote(f), entry, acc)
else
- { :cont, acc }
+ {:cont, acc}
end
end
end
@@ -112,10 +112,10 @@ defmodule Stream.Reducers do
quote do
fn
entry, acc(h, :first, t) ->
- cont_with_acc(unquote(f), entry, h, { :ok, entry }, t)
- entry, acc(h, { :ok, acc }, t) ->
+ cont_with_acc(unquote(f), entry, h, {:ok, entry}, t)
+ entry, acc(h, {:ok, acc}, t) ->
value = unquote(callback).(entry, acc)
- cont_with_acc(unquote(f), value, h, { :ok, value }, t)
+ cont_with_acc(unquote(f), value, h, {:ok, value}, t)
end
end
end
@@ -135,7 +135,7 @@ defmodule Stream.Reducers do
if n >= 1 do
cont_with_acc(unquote(f), entry, h, n-1, t)
else
- { :halt, orig }
+ {:halt, orig}
end
end
end
@@ -148,7 +148,7 @@ defmodule Stream.Reducers do
when n === unquote(nth) ->
cont_with_acc(unquote(f), entry, h, 1, t)
entry, acc(h, n, t) ->
- { :cont, acc(h, n+1, t) }
+ {:cont, acc(h, n+1, t)}
end
end
end
@@ -159,7 +159,7 @@ defmodule Stream.Reducers do
if unquote(callback).(entry) do
cont(unquote(f), entry, acc)
else
- { :halt, acc }
+ {:halt, acc}
end
end
end
@@ -170,7 +170,7 @@ defmodule Stream.Reducers do
fn(entry, acc(h, prev, t) = acc) ->
value = unquote(callback).(entry)
if :lists.member(value, prev) do
- { :cont, acc }
+ {:cont, acc}
else
cont_with_acc(unquote(f), entry, h, [value|prev], t)
end
@@ -181,7 +181,7 @@ defmodule Stream.Reducers do
defmacro with_index(f \\ nil) do
quote do
fn(entry, acc(h, counter, t)) ->
- cont_with_acc(unquote(f), { entry, counter }, h, counter + 1, t)
+ cont_with_acc(unquote(f), {entry, counter}, h, counter + 1, t)
end
end
end
diff --git a/lib/elixir/lib/string.ex b/lib/elixir/lib/string.ex
index 276db170e..509af4f0b 100644
--- a/lib/elixir/lib/string.ex
+++ b/lib/elixir/lib/string.ex
@@ -308,7 +308,7 @@ defmodule String do
"""
@spec capitalize(t) :: t
def capitalize(string) when is_binary(string) do
- { char, rest } = String.Unicode.titlecase_once(string)
+ {char, rest} = String.Unicode.titlecase_once(string)
char <> downcase(rest)
end
@@ -640,10 +640,10 @@ defmodule String do
## Examples
iex> String.next_codepoint("josé")
- { "j", "osé" }
+ {"j", "osé"}
"""
- @compile { :inline, next_codepoint: 1 }
+ @compile {:inline, next_codepoint: 1}
@spec next_codepoint(t) :: {codepoint, t} | nil
defdelegate next_codepoint(string), to: String.Unicode
@@ -731,11 +731,11 @@ defmodule String do
## Examples
iex> String.next_grapheme("josé")
- { "j", "osé" }
+ {"j", "osé"}
"""
- @compile { :inline, next_grapheme: 1 }
- @spec next_grapheme(t) :: { grapheme, t } | nil
+ @compile {:inline, next_grapheme: 1}
+ @spec next_grapheme(t) :: {grapheme, t} | nil
defdelegate next_grapheme(string), to: String.Graphemes
@doc """
@@ -754,7 +754,7 @@ defmodule String do
@spec first(t) :: grapheme | nil
def first(string) do
case next_grapheme(string) do
- { char, _ } -> char
+ {char, _} -> char
nil -> nil
end
end
@@ -1166,13 +1166,13 @@ defmodule String do
## Examples
iex> String.from_char_data([0x00E6, 0x00DF])
- { :ok, "æß" }
+ {:ok, "æß"}
iex> String.from_char_data([0x0061, "bc"])
- { :ok, "abc" }
+ {:ok, "abc"}
"""
- @spec from_char_data(char_data) :: { :ok, String.t } | { :error, binary, binary } | { :incomplete, binary, binary }
+ @spec from_char_data(char_data) :: {:ok, String.t} | {:error, binary, binary} | {:incomplete, binary, binary}
def from_char_data(binary) when is_binary(binary) do
binary
end
@@ -1180,12 +1180,12 @@ defmodule String do
def from_char_data(list) when is_list(list) do
case :unicode.characters_to_binary(list) do
result when is_binary(result) ->
- { :ok, result }
+ {:ok, result}
- { :error, _, _ } = error ->
+ {:error, _, _} = error ->
error
- { :incomplete, _, _ } = incomplete ->
+ {:incomplete, _, _} = incomplete ->
incomplete
end
end
@@ -1215,10 +1215,10 @@ defmodule String do
result when is_binary(result) ->
result
- { :error, encoded, rest } ->
+ {:error, encoded, rest} ->
raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :invalid
- { :incomplete, encoded, rest } ->
+ {:incomplete, encoded, rest} ->
raise UnicodeConversionError, encoded: encoded, rest: rest, kind: :incomplete
end
end
diff --git a/lib/elixir/lib/string_io.ex b/lib/elixir/lib/string_io.ex
index 4366b3e74..c66f1c21d 100644
--- a/lib/elixir/lib/string_io.ex
+++ b/lib/elixir/lib/string_io.ex
@@ -4,7 +4,7 @@ defmodule StringIO do
## Examples
- iex> { :ok, pid } = StringIO.open("foo")
+ iex> {:ok, pid} = StringIO.open("foo")
iex> IO.read(pid, 2)
"fo"
@@ -23,22 +23,22 @@ defmodule StringIO do
## Examples
- iex> { :ok, pid } = StringIO.open("foo")
+ iex> {:ok, pid} = StringIO.open("foo")
iex> IO.gets(pid, ">")
"foo"
iex> StringIO.contents(pid)
- { "", "" }
+ {"", ""}
- iex> { :ok, pid } = StringIO.open("foo", capture_prompt: true)
+ iex> {:ok, pid} = StringIO.open("foo", capture_prompt: true)
iex> IO.gets(pid, ">")
"foo"
iex> StringIO.contents(pid)
- { "", ">" }
+ {"", ">"}
"""
- @spec open(binary, Keyword.t) :: { :ok, pid }
+ @spec open(binary, Keyword.t) :: {:ok, pid}
def open(string, options \\ []) when is_binary(string) do
- :gen_server.start_link(__MODULE__, { string, options }, [])
+ :gen_server.start_link(__MODULE__, {string, options}, [])
end
@doc """
@@ -46,13 +46,13 @@ defmodule StringIO do
## Examples
- iex> { :ok, pid } = StringIO.open("in")
+ iex> {:ok, pid} = StringIO.open("in")
iex> IO.write(pid, "out")
iex> StringIO.contents(pid)
- { "in", "out" }
+ {"in", "out"}
"""
- @spec contents(pid) :: { binary, binary }
+ @spec contents(pid) :: {binary, binary}
def contents(pid) when is_pid(pid) do
:gen_server.call(pid, :contents)
end
@@ -62,27 +62,27 @@ defmodule StringIO do
## Examples
- iex> { :ok, pid } = StringIO.open("in")
+ iex> {:ok, pid} = StringIO.open("in")
iex> IO.write(pid, "out")
iex> StringIO.close(pid)
- { :ok, { "in", "out" } }
+ {:ok, {"in", "out"}}
"""
- @spec close(pid) :: { :ok, { binary, binary } }
+ @spec close(pid) :: {:ok, {binary, binary}}
def close(pid) when is_pid(pid) do
:gen_server.call(pid, :close)
end
## callbacks
- def init({ string, options }) do
+ def init({string, options}) do
capture_prompt = options[:capture_prompt] || false
- { :ok, state(input: string, capture_prompt: capture_prompt) }
+ {:ok, state(input: string, capture_prompt: capture_prompt)}
end
- def handle_info({ :io_request, from, reply_as, req }, s) do
+ def handle_info({:io_request, from, reply_as, req}, s) do
s = io_request(from, reply_as, req, s)
- { :noreply, s }
+ {:noreply, s}
end
def handle_info(msg, s) do
@@ -90,11 +90,11 @@ defmodule StringIO do
end
def handle_call(:contents, _from, state(input: input, output: output) = s) do
- { :reply, { input, output }, s }
+ {:reply, {input, output}, s}
end
def handle_call(:close, _from, state(input: input, output: output) = s) do
- { :stop, :normal, { :ok, { input, output } }, s }
+ {:stop, :normal, {:ok, {input, output}}, s}
end
def handle_call(request, from, s) do
@@ -102,78 +102,78 @@ defmodule StringIO do
end
defp io_request(from, reply_as, req, s) do
- { reply, s } = io_request(req, s)
+ {reply, s} = io_request(req, s)
io_reply(from, reply_as, to_reply(reply))
s
end
- defp io_request({ :put_chars, chars }, state(output: output) = s) do
- { :ok, state(s, output: << output :: binary, String.from_char_data!(chars) :: binary >>) }
+ defp io_request({:put_chars, chars}, state(output: output) = s) do
+ {:ok, state(s, output: << output :: binary, String.from_char_data!(chars) :: binary >>)}
end
- defp io_request({ :put_chars, m, f, as }, state(output: output) = s) do
+ defp io_request({:put_chars, m, f, as}, state(output: output) = s) do
chars = apply(m, f, as)
- { :ok, state(s, output: << output :: binary, String.from_char_data!(chars) :: binary >>) }
+ {:ok, state(s, output: << output :: binary, String.from_char_data!(chars) :: binary >>)}
end
- defp io_request({ :put_chars, _encoding, chars }, s) do
- io_request({ :put_chars, chars }, s)
+ defp io_request({:put_chars, _encoding, chars}, s) do
+ io_request({:put_chars, chars}, s)
end
- defp io_request({ :put_chars, _encoding, mod, func, args }, s) do
- io_request({ :put_chars, mod, func, args }, s)
+ defp io_request({:put_chars, _encoding, mod, func, args}, s) do
+ io_request({:put_chars, mod, func, args}, s)
end
- defp io_request({ :get_chars, prompt, n }, s) when n >= 0 do
- io_request({ :get_chars, :latin1, prompt, n }, s)
+ defp io_request({:get_chars, prompt, n}, s) when n >= 0 do
+ io_request({:get_chars, :latin1, prompt, n}, s)
end
- defp io_request({ :get_chars, encoding, prompt, n }, s) when n >= 0 do
+ defp io_request({:get_chars, encoding, prompt, n}, s) when n >= 0 do
get_chars(encoding, prompt, n, s)
end
- defp io_request({ :get_line, prompt }, s) do
- io_request({ :get_line, :latin1, prompt }, s)
+ defp io_request({:get_line, prompt}, s) do
+ io_request({:get_line, :latin1, prompt}, s)
end
- defp io_request({ :get_line, encoding, prompt }, s) do
+ defp io_request({:get_line, encoding, prompt}, s) do
get_line(encoding, prompt, s)
end
- defp io_request({ :get_until, prompt, mod, fun, args }, s) do
- io_request({ :get_until, :latin1, prompt, mod, fun, args }, s)
+ defp io_request({:get_until, prompt, mod, fun, args}, s) do
+ io_request({:get_until, :latin1, prompt, mod, fun, args}, s)
end
- defp io_request({ :get_until, encoding, prompt, mod, fun, args }, s) do
+ defp io_request({:get_until, encoding, prompt, mod, fun, args}, s) do
get_until(encoding, prompt, mod, fun, args, s)
end
- defp io_request({ :get_password, encoding }, s) do
+ defp io_request({:get_password, encoding}, s) do
get_line(encoding, "", s)
end
- defp io_request({ :setopts, _opts }, s) do
- { { :error, :enotsup }, s }
+ defp io_request({:setopts, _opts}, s) do
+ {{:error, :enotsup}, s}
end
defp io_request(:getopts, s) do
- { { :ok, [binary: true, encoding: :unicode] }, s }
+ {{:ok, [binary: true, encoding: :unicode]}, s}
end
- defp io_request({ :get_geometry, :columns }, s) do
- { { :error, :enotsup }, s }
+ defp io_request({:get_geometry, :columns}, s) do
+ {{:error, :enotsup}, s}
end
- defp io_request({ :get_geometry, :rows }, s) do
- { { :error, :enotsup }, s }
+ defp io_request({:get_geometry, :rows}, s) do
+ {{:error, :enotsup}, s}
end
- defp io_request({ :requests, reqs }, s) do
- io_requests(reqs, { :ok, s })
+ defp io_request({:requests, reqs}, s) do
+ io_requests(reqs, {:ok, s})
end
defp io_request(_, s) do
- { { :error, :request }, s }
+ {{:error, :request}, s}
end
## get_chars
@@ -181,42 +181,42 @@ defmodule StringIO do
defp get_chars(encoding, prompt, n,
state(input: input, output: output, capture_prompt: capture_prompt) = s) do
case do_get_chars(input, encoding, n) do
- { :error, _ } = error ->
- { error, s }
- { result, input } ->
+ {:error, _} = error ->
+ {error, s}
+ {result, input} ->
if capture_prompt do
output = << output :: binary, String.from_char_data!(prompt) :: binary >>
end
- { result, state(s, input: input, output: output) }
+ {result, state(s, input: input, output: output)}
end
end
defp do_get_chars("", _encoding, _n) do
- { :eof, "" }
+ {:eof, ""}
end
defp do_get_chars(input, :latin1, n) when byte_size(input) < n do
- { input, "" }
+ {input, ""}
end
defp do_get_chars(input, :latin1, n) do
<< chars :: [ binary, size(n) ], rest :: binary >> = input
- { chars, rest }
+ {chars, rest}
end
defp do_get_chars(input, encoding, n) do
try do
case :file_io_server.count_and_find(input, n, encoding) do
- { buf_count, split_pos } when buf_count < n or split_pos == :none ->
- { input, "" }
- { _buf_count, split_pos } ->
+ {buf_count, split_pos} when buf_count < n or split_pos == :none ->
+ {input, ""}
+ {_buf_count, split_pos} ->
<< chars :: [ binary, size(split_pos) ], rest :: binary >> = input
- { chars, rest }
+ {chars, rest}
end
catch
:exit, :invalid_unicode ->
- { :error, :invalid_unicode }
+ {:error, :invalid_unicode}
end
end
@@ -225,29 +225,29 @@ defmodule StringIO do
defp get_line(encoding, prompt,
state(input: input, output: output, capture_prompt: capture_prompt) = s) do
case :unicode.characters_to_list(input, encoding) do
- { :error, _, _ } ->
- { { :error, :collect_line }, s }
- { :incomplete, _, _ } ->
- { { :error, :collect_line }, s }
+ {:error, _, _} ->
+ {{:error, :collect_line}, s}
+ {:incomplete, _, _} ->
+ {{:error, :collect_line}, s}
chars ->
- { result, input } = do_get_line(chars, encoding)
+ {result, input} = do_get_line(chars, encoding)
if capture_prompt do
output = << output :: binary, String.from_char_data!(prompt) :: binary >>
end
- { result, state(s, input: input, output: output) }
+ {result, state(s, input: input, output: output)}
end
end
defp do_get_line('', _encoding) do
- { :eof, "" }
+ {:eof, ""}
end
defp do_get_line(chars, encoding) do
- { line, rest } = collect_line(chars)
- { :unicode.characters_to_binary(line, encoding),
- :unicode.characters_to_binary(rest, encoding) }
+ {line, rest} = collect_line(chars)
+ {:unicode.characters_to_binary(line, encoding),
+ :unicode.characters_to_binary(rest, encoding)}
end
## get_until
@@ -255,12 +255,12 @@ defmodule StringIO do
defp get_until(encoding, prompt, mod, fun, args,
state(input: input, output: output, capture_prompt: capture_prompt) = s) do
case :unicode.characters_to_list(input, encoding) do
- { :error, _, _ } ->
- { :error, s }
- { :incomplete, _, _ } ->
- { :error, s }
+ {:error, _, _} ->
+ {:error, s}
+ {:incomplete, _, _} ->
+ {:error, s}
chars ->
- { result, input, count } = do_get_until(chars, encoding, mod, fun, args)
+ {result, input, count} = do_get_until(chars, encoding, mod, fun, args)
if capture_prompt do
output = << output :: binary, :binary.copy(String.from_char_data!(prompt), count) :: binary >>
@@ -272,7 +272,7 @@ defmodule StringIO do
_ -> :unicode.characters_to_binary(input, encoding)
end
- { result, state(s, input: input, output: output) }
+ {result, state(s, input: input, output: output)}
end
end
@@ -280,30 +280,30 @@ defmodule StringIO do
defp do_get_until('', encoding, mod, fun, args, continuation, count) do
case apply(mod, fun, [continuation, :eof | args]) do
- { :done, result, rest } ->
- { result, rest, count + 1 }
- { :more, next_continuation } ->
+ {:done, result, rest} ->
+ {result, rest, count + 1}
+ {:more, next_continuation} ->
do_get_until('', encoding, mod, fun, args, next_continuation, count + 1)
end
end
defp do_get_until(chars, encoding, mod, fun, args, continuation, count) do
- { line, rest } = collect_line(chars)
+ {line, rest} = collect_line(chars)
case apply(mod, fun, [continuation, line | args]) do
- { :done, result, rest1 } ->
+ {:done, result, rest1} ->
unless rest1 == :eof do
rest = rest1 ++ rest
end
- { result, rest, count + 1 }
- { :more, next_continuation } ->
+ {result, rest, count + 1}
+ {:more, next_continuation} ->
do_get_until(rest, encoding, mod, fun, args, next_continuation, count + 1)
end
end
## io_requests
- defp io_requests([r|rs], { :ok, s }) do
+ defp io_requests([r|rs], {:ok, s}) do
io_requests(rs, io_request(r, s))
end
@@ -318,15 +318,15 @@ defmodule StringIO do
end
defp collect_line([], stack) do
- { :lists.reverse(stack), [] }
+ {:lists.reverse(stack), []}
end
defp collect_line([?\r, ?\n | rest], stack) do
- { :lists.reverse([?\n|stack]), rest }
+ {:lists.reverse([?\n|stack]), rest}
end
defp collect_line([?\n | rest], stack) do
- { :lists.reverse([?\n|stack]), rest }
+ {:lists.reverse([?\n|stack]), rest}
end
defp collect_line([h|t], stack) do
@@ -334,7 +334,7 @@ defmodule StringIO do
end
defp io_reply(from, reply_as, reply) do
- send from, { :io_reply, reply_as, reply }
+ send from, {:io_reply, reply_as, reply}
end
defp to_reply(list) when is_list(list), do: String.from_char_data!(list)
diff --git a/lib/elixir/lib/supervisor/behaviour.ex b/lib/elixir/lib/supervisor/behaviour.ex
index 6b0c0cd99..5d623f679 100644
--- a/lib/elixir/lib/supervisor/behaviour.ex
+++ b/lib/elixir/lib/supervisor/behaviour.ex
@@ -25,7 +25,7 @@ defmodule Supervisor.Behaviour do
end
end
- { :ok, pid } = :supervisor.start_link(MyServer, [])
+ {:ok, pid} = :supervisor.start_link(MyServer, [])
"""
@@ -87,7 +87,7 @@ defmodule Supervisor.Behaviour do
maxR = Keyword.get(options, :max_restarts, 5)
maxS = Keyword.get(options, :max_seconds, 5)
- { :ok, { { strategy, maxR, maxS }, children } }
+ {:ok, {{strategy, maxR, maxS}, children}}
end
@child_doc """
@@ -122,7 +122,7 @@ defmodule Supervisor.Behaviour do
* `:transient` - the child process is restarted only if it
terminates abnormally, i.e. with another exit reason than
- `:normal`, `:shutdown` or `{ :shutdown, term }`;
+ `:normal`, `:shutdown` or `{:shutdown, term}`;
## Shutdown values
@@ -178,7 +178,7 @@ defmodule Supervisor.Behaviour do
restart = Keyword.get(options, :restart, :permanent)
shutdown = Keyword.get(options, :shutdown, 5000)
- { id, { module, function, args },
- restart, shutdown, type, modules }
+ {id, {module, function, args},
+ restart, shutdown, type, modules}
end
end
diff --git a/lib/elixir/lib/system.ex b/lib/elixir/lib/system.ex
index 9473daf17..713fa8b09 100644
--- a/lib/elixir/lib/system.ex
+++ b/lib/elixir/lib/system.ex
@@ -11,7 +11,7 @@ defmodule System do
defp read_stripped(path) do
case :file.read_file(path) do
- { :ok, binary } ->
+ {:ok, binary} ->
strip_re(binary, "^\s+|\s+$")
_ -> ""
end
@@ -31,7 +31,7 @@ defmodule System do
defmacrop get_describe do
dirpath = :filename.join(__DIR__, "../../../.git")
case :file.read_file_info(dirpath) do
- { :ok, _ } ->
+ {:ok, _} ->
if :os.find_executable('git') do
data = :os.cmd('git describe --always --tags')
strip_re(data, "\n")
@@ -83,7 +83,7 @@ defmodule System do
"""
@spec argv([String.t]) :: :ok
def argv(args) do
- :elixir_code_server.cast({ :argv, args })
+ :elixir_code_server.cast({:argv, args})
end
@doc """
@@ -94,7 +94,7 @@ defmodule System do
"""
def cwd do
case :file.get_cwd do
- { :ok, base } -> String.from_char_data!(base)
+ {:ok, base} -> String.from_char_data!(base)
_ -> nil
end
end
@@ -117,7 +117,7 @@ defmodule System do
"""
def user_home do
case :os.type() do
- { :win32, _ } -> get_windows_home
+ {:win32, _} -> get_windows_home
_ -> get_unix_home
end
end
@@ -193,13 +193,13 @@ defmodule System do
{:ok, info} ->
type_index = File.Stat.__record__(:index, :type)
access_index = File.Stat.__record__(:index, :access)
- case { elem(info, type_index), elem(info, access_index) } do
- { :directory, access } when access in [:read_write, :write] ->
+ case {elem(info, type_index), elem(info, access_index)} do
+ {:directory, access} when access in [:read_write, :write] ->
String.from_char_data!(dir)
_ ->
nil
end
- { :error, _ } -> nil
+ {:error, _} -> nil
end
end
@@ -214,7 +214,7 @@ defmodule System do
as an argument.
"""
def at_exit(fun) when is_function(fun, 1) do
- :elixir_code_server.cast { :at_exit, fun }
+ :elixir_code_server.cast {:at_exit, fun}
end
@doc """
diff --git a/lib/elixir/lib/tuple.ex b/lib/elixir/lib/tuple.ex
index f47c8bd2d..8e11e1fab 100644
--- a/lib/elixir/lib/tuple.ex
+++ b/lib/elixir/lib/tuple.ex
@@ -12,7 +12,7 @@ defmodule Tuple do
## Examples
iex> Tuple.duplicate(:hello, 3)
- { :hello, :hello, :hello }
+ {:hello, :hello, :hello}
"""
@spec duplicate(term, non_neg_integer) :: tuple
@@ -29,9 +29,9 @@ defmodule Tuple do
## Examples
- iex> tuple = { :bar, :baz }
+ iex> tuple = {:bar, :baz}
iex> Tuple.insert_at(tuple, 0, :foo)
- { :foo, :bar, :baz }
+ {:foo, :bar, :baz}
"""
@spec insert_at(tuple, non_neg_integer, term) :: tuple
@@ -48,9 +48,9 @@ defmodule Tuple do
## Examples
- iex> tuple = { :foo, :bar, :baz }
+ iex> tuple = {:foo, :bar, :baz}
iex> Tuple.delete_at(tuple, 0)
- { :bar, :baz }
+ {:bar, :baz}
"""
@spec delete_at(tuple, non_neg_integer) :: tuple
diff --git a/lib/elixir/lib/uri.ex b/lib/elixir/lib/uri.ex
index 8772551c3..9c6fcd6b1 100644
--- a/lib/elixir/lib/uri.ex
+++ b/lib/elixir/lib/uri.ex
@@ -16,9 +16,9 @@ defmodule URI do
"ldap" => 389,
"sftp" => 22,
"tftp" => 69,
- }
+ }
- Enum.each @ports, fn { scheme, port } ->
+ Enum.each @ports, fn {scheme, port} ->
def normalize_scheme(unquote(scheme)), do: unquote(scheme)
def default_port(unquote(scheme)), do: unquote(port)
end
@@ -45,7 +45,7 @@ defmodule URI do
"""
def default_port(scheme) when is_binary(scheme) do
- { :ok, dict } = :application.get_env(:elixir, :uri)
+ {:ok, dict} = :application.get_env(:elixir, :uri)
Map.get(dict, scheme)
end
@@ -53,7 +53,7 @@ defmodule URI do
Registers a scheme with a default port.
"""
def default_port(scheme, port) when is_binary(scheme) and port > 0 do
- { :ok, dict } = :application.get_env(:elixir, :uri)
+ {:ok, dict} = :application.get_env(:elixir, :uri)
:application.set_env(:elixir, :uri, Map.put(dict, scheme, port))
end
@@ -92,7 +92,7 @@ defmodule URI do
"""
def decode_query(q, dict \\ %{}) when is_binary(q) do
- Enum.reduce query_decoder(q), dict, fn({ k, v }, acc) -> Dict.put(acc, k, v) end
+ Enum.reduce query_decoder(q), dict, fn({k, v}, acc) -> Dict.put(acc, k, v) end
end
@doc """
@@ -114,19 +114,19 @@ defmodule URI do
end
defp do_decoder(q) do
- { first, next } =
+ {first, next} =
case :binary.split(q, "&") do
- [first, rest] -> { first, rest }
- [first] -> { first, "" }
+ [first, rest] -> {first, rest}
+ [first] -> {first, ""}
end
current =
case :binary.split(first, "=") do
- [ key, value ] -> { decode(key), decode(value) }
- [ key ] -> { decode(key), nil }
+ [ key, value ] -> {decode(key), decode(value)}
+ [ key ] -> {decode(key), nil}
end
- { current, next }
+ {current, next}
end
defp pair({k, _}) when is_list(k) do
@@ -224,7 +224,7 @@ defmodule URI do
parts = nillify(Regex.run(regex, s))
destructure [_, _, scheme, _, authority, path, _, query, _, fragment], parts
- { userinfo, host, port } = split_authority(authority)
+ {userinfo, host, port} = split_authority(authority)
if authority do
authority = ""
@@ -256,7 +256,7 @@ defmodule URI do
port = if port, do: binary_to_integer(port)
host = if host, do: host |> String.lstrip(?[) |> String.rstrip(?])
- { userinfo, host, port }
+ {userinfo, host, port}
end
# Regex.run returns empty strings sometimes. We want
diff --git a/lib/elixir/lib/version.ex b/lib/elixir/lib/version.ex
index 6e3da436c..7966533e7 100644
--- a/lib/elixir/lib/version.ex
+++ b/lib/elixir/lib/version.ex
@@ -49,10 +49,10 @@ defmodule Version do
@type t :: String.t | Version.Schema.t
@type requirement :: String.t | Version.Requirement.t
- @type matchable :: { major :: String.t | non_neg_integer,
+ @type matchable :: {major :: String.t | non_neg_integer,
minor :: non_neg_integer | nil,
patch :: non_neg_integer | nil,
- pre :: [String.t] }
+ pre :: [String.t]}
import Kernel, except: [match?: 2]
@@ -89,7 +89,7 @@ defmodule Version do
@spec match?(t, requirement) :: boolean
def match?(vsn, req) when is_binary(req) do
case parse_requirement(req) do
- { :ok, req } ->
+ {:ok, req} ->
match?(vsn, req)
:error ->
raise InvalidRequirement, message: req
@@ -97,7 +97,7 @@ defmodule Version do
end
def match?(version, Requirement[matchspec: spec]) do
- { :ok, result } = :ets.test_ms(to_matchable(version), spec)
+ {:ok, result} = :ets.test_ms(to_matchable(version), spec)
result != false
end
@@ -126,10 +126,10 @@ defmodule Version do
do_compare(to_matchable(vsn1), to_matchable(vsn2))
end
- defp do_compare({ major1, minor1, patch1, pre1 }, { major2, minor2, patch2, pre2 }) do
+ defp do_compare({major1, minor1, patch1, pre1}, {major2, minor2, patch2, pre2}) do
cond do
- { major1, minor1, patch1 } > { major2, minor2, patch2 } -> :gt
- { major1, minor1, patch1 } < { major2, minor2, patch2 } -> :lt
+ {major1, minor1, patch1} > {major2, minor2, patch2} -> :gt
+ {major1, minor1, patch1} < {major2, minor2, patch2} -> :lt
pre1 == [] and pre2 != [] -> :gt
pre1 != [] and pre2 == [] -> :lt
pre1 > pre2 -> :gt
@@ -150,13 +150,13 @@ defmodule Version do
:error
"""
- @spec parse(String.t) :: { :ok, Schema.t } | :error
+ @spec parse(String.t) :: {:ok, Schema.t} | :error
def parse(string) when is_binary(string) do
case Version.Parser.parse_version(string) do
- { :ok, { major, minor, patch, pre } } ->
+ {:ok, {major, minor, patch, pre}} ->
vsn = Version.Schema[major: major, minor: minor, patch: patch,
pre: pre, source: string, build: get_build(string)]
- { :ok, vsn }
+ {:ok, vsn}
:error ->
:error
end
@@ -174,23 +174,23 @@ defmodule Version do
:error
"""
- @spec parse_requirement(String.t) :: { :ok, Requirement.t } | :error
+ @spec parse_requirement(String.t) :: {:ok, Requirement.t} | :error
def parse_requirement(string) when is_binary(string) do
case Version.Parser.parse_requirement(string) do
- { :ok, spec } ->
- { :ok, Requirement[source: string, matchspec: spec] }
+ {:ok, spec} ->
+ {:ok, Requirement[source: string, matchspec: spec]}
:error ->
:error
end
end
defp to_matchable(Schema[major: major, minor: minor, patch: patch, pre: pre]) do
- { major, minor, patch, pre }
+ {major, minor, patch, pre}
end
defp to_matchable(string) do
case Version.Parser.parse_version(string) do
- { :ok, vsn } -> vsn
+ {:ok, vsn} -> vsn
:error -> raise InvalidVersion, message: string
end
end
@@ -304,8 +304,8 @@ defmodule Version do
patch = patch && binary_to_integer(patch)
case parse_pre(pre) do
- { :ok, pre } ->
- { :ok, { major, minor, patch, pre } }
+ {:ok, pre} ->
+ {:ok, {major, minor, patch, pre}}
:error ->
:error
end
@@ -315,7 +315,7 @@ defmodule Version do
end
end
- defp parse_pre(nil), do: { :ok, [] }
+ defp parse_pre(nil), do: {:ok, []}
defp parse_pre(pre), do: parse_pre(String.split(pre, "."), [])
defp parse_pre([piece|t], acc) do
@@ -330,7 +330,7 @@ defmodule Version do
end
defp parse_pre([], acc) do
- { :ok, Enum.reverse(acc) }
+ {:ok, Enum.reverse(acc)}
end
defp valid_requirement?([]), do: false
@@ -367,11 +367,11 @@ defmodule Version do
defp approximate_upper(version) do
case version do
- { major, _minor, nil, _ } ->
- { major + 1, 0, 0, [0] }
+ {major, _minor, nil, _} ->
+ {major + 1, 0, 0, [0]}
- { major, minor, _patch, _ } ->
- { major, minor + 1, 0, [0] }
+ {major, minor, _patch, _} ->
+ {major, minor + 1, 0, [0]}
end
end
@@ -379,7 +379,7 @@ defmodule Version do
if valid_requirement?(lexed) do
first = to_condition(lexed)
rest = Enum.drop(lexed, 2)
- { :ok, [{{ :'$1', :'$2', :'$3', :'$4' }, [to_condition(first, rest)], [:'$_'] }] }
+ {:ok, [{{:'$1', :'$2', :'$3', :'$4'}, [to_condition(first, rest)], [:'$_']}]}
else
:error
end
@@ -389,68 +389,68 @@ defmodule Version do
defp to_condition([:'==', version | _]) do
version = parse_condition(version)
- { :'==', :'$_', { :const, version } }
+ {:'==', :'$_', {:const, version}}
end
defp to_condition([:'!=', version | _]) do
version = parse_condition(version)
- { :'/=', :'$_', { :const, version } }
+ {:'/=', :'$_', {:const, version}}
end
defp to_condition([:'~>', version | _]) do
from = parse_condition(version, true)
to = approximate_upper(from)
- { :andalso, to_condition([:'>=', matchable_to_string(from)]),
- to_condition([:'<', matchable_to_string(to)]) }
+ {:andalso, to_condition([:'>=', matchable_to_string(from)]),
+ to_condition([:'<', matchable_to_string(to)])}
end
defp to_condition([:'>', version | _]) do
- { major, minor, patch, pre } = parse_condition(version)
+ {major, minor, patch, pre} = parse_condition(version)
- { :andalso, { :not, { :is_binary, :'$1' } },
- { :orelse, { :'>', {{ :'$1', :'$2', :'$3' }},
- { :const, { major, minor, patch } } },
- { :andalso, { :'==', {{ :'$1', :'$2', :'$3' }},
- { :const, { major, minor, patch } } },
- { :orelse, { :andalso, { :'==', { :length, :'$4' }, 0 },
- { :'/=', length(pre), 0 } },
- { :andalso, { :'/=', length(pre), 0 },
- { :orelse, { :'>', { :length, :'$4' }, length(pre) },
- { :andalso, { :'==', { :length, :'$4' }, length(pre) },
- { :'>', :'$4', { :const, pre } } } } } } } } }
+ {:andalso, {:not, {:is_binary, :'$1'}},
+ {:orelse, {:'>', {{:'$1', :'$2', :'$3'}},
+ {:const, {major, minor, patch}}},
+ {:andalso, {:'==', {{:'$1', :'$2', :'$3'}},
+ {:const, {major, minor, patch}}},
+ {:orelse, {:andalso, {:'==', {:length, :'$4'}, 0},
+ {:'/=', length(pre), 0}},
+ {:andalso, {:'/=', length(pre), 0},
+ {:orelse, {:'>', {:length, :'$4'}, length(pre)},
+ {:andalso, {:'==', {:length, :'$4'}, length(pre)},
+ {:'>', :'$4', {:const, pre}}}}}}}}}
end
defp to_condition([:'>=', version | _]) do
matchable = parse_condition(version)
- { :orelse, { :andalso, { :not, { :is_binary, :'$1' } },
- { :'==', :'$_', { :const, matchable } } },
- to_condition([:'>', version]) }
+ {:orelse, {:andalso, {:not, {:is_binary, :'$1'}},
+ {:'==', :'$_', {:const, matchable}}},
+ to_condition([:'>', version])}
end
defp to_condition([:'<', version | _]) do
- { major, minor, patch, pre } = parse_condition(version)
+ {major, minor, patch, pre} = parse_condition(version)
- { :andalso, { :not, { :is_binary, :'$1' } },
- { :orelse, { :'<', {{ :'$1', :'$2', :'$3' }},
- { :const, { major, minor, patch } } },
- { :andalso, { :'==', {{ :'$1', :'$2', :'$3' }},
- { :const, { major, minor, patch } } },
- { :orelse, { :andalso, { :'/=', { :length, :'$4' }, 0 },
- { :'==', length(pre), 0 } },
- { :andalso, { :'/=', { :length, :'$4' }, 0 },
- { :orelse, { :'<', { :length, :'$4' }, length(pre) },
- { :andalso, { :'==', { :length, :'$4' }, length(pre) },
- { :'<', :'$4', { :const, pre } } } } } } } } }
+ {:andalso, {:not, {:is_binary, :'$1'}},
+ {:orelse, {:'<', {{:'$1', :'$2', :'$3'}},
+ {:const, {major, minor, patch}}},
+ {:andalso, {:'==', {{:'$1', :'$2', :'$3'}},
+ {:const, {major, minor, patch}}},
+ {:orelse, {:andalso, {:'/=', {:length, :'$4'}, 0},
+ {:'==', length(pre), 0}},
+ {:andalso, {:'/=', {:length, :'$4'}, 0},
+ {:orelse, {:'<', {:length, :'$4'}, length(pre)},
+ {:andalso, {:'==', {:length, :'$4'}, length(pre)},
+ {:'<', :'$4', {:const, pre}}}}}}}}}
end
defp to_condition([:'<=', version | _]) do
matchable = parse_condition(version)
- { :orelse, { :andalso, { :not, { :is_binary, :'$1' } },
- { :'==', :'$_', { :const, matchable } } },
- to_condition([:'<', version]) }
+ {:orelse, {:andalso, {:not, {:is_binary, :'$1'}},
+ {:'==', :'$_', {:const, matchable}}},
+ to_condition([:'<', version])}
end
defp to_condition(current, []) do
@@ -458,21 +458,21 @@ defmodule Version do
end
defp to_condition(current, [:'&&', operator, version | rest]) do
- to_condition({ :andalso, current, to_condition([operator, version]) }, rest)
+ to_condition({:andalso, current, to_condition([operator, version])}, rest)
end
defp to_condition(current, [:'||', operator, version | rest]) do
- to_condition({ :orelse, current, to_condition([operator, version]) }, rest)
+ to_condition({:orelse, current, to_condition([operator, version])}, rest)
end
defp parse_condition(version, approximate? \\ false) do
case parse_version(version, approximate?) do
- { :ok, version } -> version
+ {:ok, version} -> version
:error -> throw :invalid_matchspec
end
end
- defp matchable_to_string({ major, minor, patch, pre }) do
+ defp matchable_to_string({major, minor, patch, pre}) do
patch = if patch, do: "#{patch}", else: "0"
pre = if pre != [], do: "-#{Enum.join(pre, ".")}"
"#{major}.#{minor}.#{patch}#{pre}"
diff --git a/lib/elixir/src/elixir.erl b/lib/elixir/src/elixir.erl
index 05f4991c9..6e299a65e 100644
--- a/lib/elixir/src/elixir.erl
+++ b/lib/elixir/src/elixir.erl
@@ -62,46 +62,46 @@ env_for_eval(Opts) ->
requires = elixir_dispatch:default_requires(),
functions = elixir_dispatch:default_functions(),
macros = elixir_dispatch:default_macros()
- }, Opts).
+ }, Opts).
env_for_eval(Env, Opts) ->
Line = case lists:keyfind(line, 1, Opts) of
- { line, RawLine } when is_integer(RawLine) -> RawLine;
+ {line, RawLine} when is_integer(RawLine) -> RawLine;
false -> Env#elixir_env.line
end,
File = case lists:keyfind(file, 1, Opts) of
- { file, RawFile } when is_binary(RawFile) -> RawFile;
+ {file, RawFile} when is_binary(RawFile) -> RawFile;
false -> Env#elixir_env.file
end,
Local = case lists:keyfind(delegate_locals_to, 1, Opts) of
- { delegate_locals_to, LocalOpt } -> LocalOpt;
+ {delegate_locals_to, LocalOpt} -> LocalOpt;
false -> Env#elixir_env.local
end,
Aliases = case lists:keyfind(aliases, 1, Opts) of
- { aliases, AliasesOpt } -> AliasesOpt;
+ {aliases, AliasesOpt} -> AliasesOpt;
false -> Env#elixir_env.aliases
end,
Requires = case lists:keyfind(requires, 1, Opts) of
- { requires, List } -> ordsets:from_list(List);
+ {requires, List} -> ordsets:from_list(List);
false -> Env#elixir_env.requires
end,
Functions = case lists:keyfind(functions, 1, Opts) of
- { functions, FunctionsOpt } -> FunctionsOpt;
+ {functions, FunctionsOpt} -> FunctionsOpt;
false -> Env#elixir_env.functions
end,
Macros = case lists:keyfind(macros, 1, Opts) of
- { macros, MacrosOpt } -> MacrosOpt;
+ {macros, MacrosOpt} -> MacrosOpt;
false -> Env#elixir_env.macros
end,
Module = case lists:keyfind(module, 1, Opts) of
- { module, ModuleOpt } when is_atom(ModuleOpt) -> ModuleOpt;
+ {module, ModuleOpt} when is_atom(ModuleOpt) -> ModuleOpt;
false -> nil
end,
@@ -109,7 +109,7 @@ env_for_eval(Env, Opts) ->
file=File, local=Local, module=Module,
macros=Macros, functions=Functions,
requires=Requires, aliases=Aliases, line=Line
- }.
+ }.
%% String evaluation
@@ -139,16 +139,16 @@ eval_forms(Tree, Binding, #elixir_env{} = E) ->
eval_forms(Tree, Binding, Opts) when is_list(Opts) ->
eval_forms(Tree, Binding, env_for_eval(Opts)).
eval_forms(Tree, Binding, Env, Scope) ->
- { ParsedBinding, ParsedScope } = elixir_scope:load_binding(Binding, Scope),
+ {ParsedBinding, ParsedScope} = elixir_scope:load_binding(Binding, Scope),
ParsedEnv = Env#elixir_env{vars=[K || {K,_} <- ParsedScope#elixir_scope.vars]},
- { Erl, NewEnv, NewScope } = quoted_to_erl(Tree, ParsedEnv, ParsedScope),
+ {Erl, NewEnv, NewScope} = quoted_to_erl(Tree, ParsedEnv, ParsedScope),
case Erl of
- { atom, _, Atom } ->
- { Atom, Binding, NewEnv, NewScope };
+ {atom, _, Atom} ->
+ {Atom, Binding, NewEnv, NewScope};
_ ->
- { value, Value, NewBinding } = erl_eval:expr(Erl, ParsedBinding),
- { Value, elixir_scope:dump_binding(NewBinding, NewScope), NewEnv, NewScope }
+ {value, Value, NewBinding} = erl_eval:expr(Erl, ParsedBinding),
+ {Value, elixir_scope:dump_binding(NewBinding, NewScope), NewEnv, NewScope}
end.
%% Converts a quoted expression to erlang abstract format
@@ -157,29 +157,29 @@ quoted_to_erl(Quoted, Env) ->
quoted_to_erl(Quoted, Env, elixir_env:env_to_scope(Env)).
quoted_to_erl(Quoted, Env, Scope) ->
- { Expanded, NewEnv } = elixir_exp:expand(Quoted, Env),
- { Erl, NewScope } = elixir_translator:translate(Expanded, Scope),
- { Erl, NewEnv, NewScope }.
+ {Expanded, NewEnv} = elixir_exp:expand(Quoted, Env),
+ {Erl, NewScope} = elixir_translator:translate(Expanded, Scope),
+ {Erl, NewEnv, NewScope}.
%% Converts a given string (char list) into quote expression
string_to_quoted(String, StartLine, File, Opts) when is_integer(StartLine), is_binary(File) ->
- case elixir_tokenizer:tokenize(String, StartLine, [{ file, File }|Opts]) of
- { ok, _Line, Tokens } ->
+ case elixir_tokenizer:tokenize(String, StartLine, [{file, File}|Opts]) of
+ {ok, _Line, Tokens} ->
try elixir_parser:parse(Tokens) of
- { ok, Forms } -> { ok, Forms };
- { error, { Line, _, [Error, Token] } } -> { error, { Line, to_binary(Error), to_binary(Token) } }
+ {ok, Forms} -> {ok, Forms};
+ {error, {Line, _, [Error, Token]}} -> {error, {Line, to_binary(Error), to_binary(Token)}}
catch
- { error, { Line, _, [Error, Token] } } -> { error, { Line, to_binary(Error), to_binary(Token) } }
+ {error, {Line, _, [Error, Token]}} -> {error, {Line, to_binary(Error), to_binary(Token)}}
end;
- { error, { Line, Error, Token }, _Rest, _SoFar } -> { error, { Line, to_binary(Error), to_binary(Token) } }
+ {error, {Line, Error, Token}, _Rest, _SoFar} -> {error, {Line, to_binary(Error), to_binary(Token)}}
end.
'string_to_quoted!'(String, StartLine, File, Opts) ->
case string_to_quoted(String, StartLine, File, Opts) of
- { ok, Forms } ->
+ {ok, Forms} ->
Forms;
- { error, { Line, Error, Token } } ->
+ {error, {Line, Error, Token}} ->
elixir_errors:parse_error(Line, File, Error, Token)
end.
diff --git a/lib/elixir/src/elixir_aliases.erl b/lib/elixir/src/elixir_aliases.erl
index 425ed3541..b94ddaf57 100644
--- a/lib/elixir/src/elixir_aliases.erl
+++ b/lib/elixir/src/elixir_aliases.erl
@@ -11,19 +11,19 @@ inspect(Atom) when is_atom(Atom) ->
%% Store an alias in the given scope
store(_Meta, New, New, _TKV, Aliases, MacroAliases, _Lexical) ->
- { Aliases, MacroAliases };
+ {Aliases, MacroAliases};
store(Meta, New, Old, TKV, Aliases, MacroAliases, Lexical) ->
record_warn(Meta, New, TKV, Lexical),
- { store_alias(New, Old, Aliases),
- store_macro_alias(Meta, New, Old, MacroAliases) }.
+ {store_alias(New, Old, Aliases),
+ store_macro_alias(Meta, New, Old, MacroAliases)}.
store_alias(New, Old, Aliases) ->
- lists:keystore(New, 1, Aliases, { New, Old }).
+ lists:keystore(New, 1, Aliases, {New, Old}).
store_macro_alias(Meta, New, Old, Aliases) ->
case lists:keymember(context, 1, Meta) andalso
lists:keyfind(counter, 1, Meta) of
- { counter, Counter } when is_integer(Counter) ->
- lists:keystore(New, 1, Aliases, { New, { Counter, Old } });
+ {counter, Counter} when is_integer(Counter) ->
+ lists:keystore(New, 1, Aliases, {New, {Counter, Old}});
_ ->
Aliases
end.
@@ -31,8 +31,8 @@ store_macro_alias(Meta, New, Old, Aliases) ->
record_warn(Meta, Ref, Opts, Lexical) ->
Warn =
case lists:keyfind(warn, 1, Opts) of
- { warn, false } -> false;
- { warn, true } -> true;
+ {warn, false} -> false;
+ {warn, true} -> true;
false -> not lists:keymember(context, 1, Meta)
end,
elixir_lexical:record_alias(Ref, ?line(Meta), Warn, Lexical).
@@ -40,23 +40,23 @@ record_warn(Meta, Ref, Opts, Lexical) ->
%% Expand an alias. It returns an atom (meaning that there
%% was an expansion) or a list of atoms.
-expand({ '__aliases__', _Meta, ['Elixir'|_] = List }, _Aliases, _MacroAliases, _LexicalTracker) ->
+expand({'__aliases__', _Meta, ['Elixir'|_] = List}, _Aliases, _MacroAliases, _LexicalTracker) ->
concat(List);
-expand({ '__aliases__', Meta, _ } = Alias, Aliases, MacroAliases, LexicalTracker) ->
+expand({'__aliases__', Meta, _} = Alias, Aliases, MacroAliases, LexicalTracker) ->
case lists:keyfind(alias, 1, Meta) of
- { alias, false } ->
+ {alias, false} ->
expand(Alias, MacroAliases, LexicalTracker);
- { alias, Atom } when is_atom(Atom) ->
+ {alias, Atom} when is_atom(Atom) ->
Atom;
false ->
expand(Alias, Aliases, LexicalTracker)
end.
-expand({ '__aliases__', Meta, [H|T] }, Aliases, LexicalTracker) when is_atom(H) ->
+expand({'__aliases__', Meta, [H|T]}, Aliases, LexicalTracker) when is_atom(H) ->
Lookup = list_to_atom("Elixir." ++ atom_to_list(H)),
Counter = case lists:keyfind(counter, 1, Meta) of
- { counter, C } -> C;
+ {counter, C} -> C;
_ -> nil
end,
case lookup(Lookup, Aliases, Counter) of
@@ -69,7 +69,7 @@ expand({ '__aliases__', Meta, [H|T] }, Aliases, LexicalTracker) when is_atom(H)
end
end;
-expand({ '__aliases__', _Meta, List }, _Aliases, _LexicalTracker) ->
+expand({'__aliases__', _Meta, List}, _Aliases, _LexicalTracker) ->
List.
%% Ensure a module is loaded before its usage.
@@ -84,7 +84,7 @@ ensure_loaded(Meta, Ref, E) ->
true -> scheduled_module;
false -> unloaded_module
end,
- elixir_errors:form_error(Meta, E#elixir_env.file, ?MODULE, { Kind, Ref })
+ elixir_errors:form_error(Meta, E#elixir_env.file, ?MODULE, {Kind, Ref})
end.
%% Receives an atom and returns the last bit as an alias.
@@ -129,8 +129,8 @@ to_partial(Arg) when is_binary(Arg) -> Arg.
lookup(Else, Dict, Counter) ->
case lists:keyfind(Else, 1, Dict) of
- { Else, { Counter, Value } } -> lookup(Value, Dict, Counter);
- { Else, Value } when is_atom(Value) -> lookup(Value, Dict, Counter);
+ {Else, {Counter, Value}} -> lookup(Value, Dict, Counter);
+ {Else, Value} when is_atom(Value) -> lookup(Value, Dict, Counter);
_ -> Else
end.
diff --git a/lib/elixir/src/elixir_bitstring.erl b/lib/elixir/src/elixir_bitstring.erl
index 6b5d73c6e..19c92e668 100644
--- a/lib/elixir/src/elixir_bitstring.erl
+++ b/lib/elixir/src/elixir_bitstring.erl
@@ -7,22 +7,22 @@
expand(Meta, Args, E) ->
case E#elixir_env.context of
match ->
- { EArgs, EA } = expand_bitstr(fun elixir_exp:expand/2, Args, [], E),
- { { '<<>>', Meta, EArgs }, EA };
+ {EArgs, EA} = expand_bitstr(fun elixir_exp:expand/2, Args, [], E),
+ {{'<<>>', Meta, EArgs}, EA};
_ ->
- { EArgs, { EC, EV } } = expand_bitstr(fun elixir_exp:expand_arg/2, Args, [], { E, E }),
- { { '<<>>', Meta, EArgs }, elixir_env:mergea(EV, EC) }
+ {EArgs, {EC, EV}} = expand_bitstr(fun elixir_exp:expand_arg/2, Args, [], {E, E}),
+ {{'<<>>', Meta, EArgs}, elixir_env:mergea(EV, EC)}
end.
expand_bitstr(_Fun, [], Acc, E) ->
- { lists:reverse(Acc), E };
+ {lists:reverse(Acc), E};
expand_bitstr(Fun, [{'::',Meta,[Left,Right]}|T], Acc, E) ->
- { ELeft, EL } = Fun(Left, E),
+ {ELeft, EL} = Fun(Left, E),
%% Variables defined outside the binary can be accounted
%% on subparts, however we can't assign new variables.
case E of
- { ER, _ } -> ok; %% expand_arg, no assigns
+ {ER, _} -> ok; %% expand_arg, no assigns
_ -> ER = E#elixir_env{context=nil} %% expand_each, revert assigns
end,
@@ -30,7 +30,7 @@ expand_bitstr(Fun, [{'::',Meta,[Left,Right]}|T], Acc, E) ->
expand_bitstr(Fun, T, [{'::',Meta,[ELeft,ERight]}|Acc], EL);
expand_bitstr(Fun, [H|T], Acc, E) ->
- { Expr, ES } = Fun(H, E),
+ {Expr, ES} = Fun(H, E),
expand_bitstr(Fun, T, [Expr|Acc], ES).
%% Expand bit info
@@ -41,25 +41,25 @@ expand_bit_info(Meta, Info, E) when is_list(Info) ->
expand_bit_info(Meta, Info, E) ->
expand_bit_info(Meta, [Info], E).
-expand_bit_info(Meta, [{ Expr, ExprMeta, Args }|T], Size, Types, E) when is_atom(Expr) ->
+expand_bit_info(Meta, [{Expr, ExprMeta, Args}|T], Size, Types, E) when is_atom(Expr) ->
ListArgs = if is_atom(Args) -> []; is_list(Args) -> Args end,
case expand_bit_type_or_size(Expr, ListArgs) of
type ->
- { EArgs, EE } = elixir_exp:expand_args(ListArgs, E),
- expand_bit_info(Meta, T, Size, [{ Expr, [], EArgs }|Types], EE);
+ {EArgs, EE} = elixir_exp:expand_args(ListArgs, E),
+ expand_bit_info(Meta, T, Size, [{Expr, [], EArgs}|Types], EE);
size ->
case Size of
default -> ok;
_ -> elixir_errors:compile_error(Meta, E#elixir_env.file, "duplicated size definition in bitstring")
end,
- { EArgs, EE } = elixir_exp:expand_args(ListArgs, E),
- expand_bit_info(Meta, T, { Expr, [], EArgs }, Types, EE);
+ {EArgs, EE} = elixir_exp:expand_args(ListArgs, E),
+ expand_bit_info(Meta, T, {Expr, [], EArgs}, Types, EE);
none ->
- handle_unknown_bit_info(Meta, { Expr, ExprMeta, ListArgs }, T, Size, Types, E)
+ handle_unknown_bit_info(Meta, {Expr, ExprMeta, ListArgs}, T, Size, Types, E)
end;
expand_bit_info(Meta, [Int|T], Size, Types, E) when is_integer(Int) ->
- expand_bit_info(Meta, [{ size, [], [Int] }|T], Size, Types, E);
+ expand_bit_info(Meta, [{size, [], [Int]}|T], Size, Types, E);
expand_bit_info(Meta, [Expr|_], _Size, _Types, E) ->
elixir_errors:compile_error(Meta, E#elixir_env.file,
@@ -89,8 +89,8 @@ expand_bit_type_or_size(unit, [_]) -> type;
expand_bit_type_or_size(size, [_]) -> size;
expand_bit_type_or_size(_, _) -> none.
-handle_unknown_bit_info(Meta, { _, ExprMeta, _ } = Expr, T, Size, Types, E) ->
- case 'Elixir.Macro':expand(Expr, elixir_env:env_to_ex({ ?line(ExprMeta), E })) of
+handle_unknown_bit_info(Meta, {_, ExprMeta, _} = Expr, T, Size, Types, E) ->
+ case 'Elixir.Macro':expand(Expr, elixir_env:env_to_ex({?line(ExprMeta), E})) of
Expr ->
elixir_errors:compile_error(ExprMeta, E#elixir_env.file,
"unknown bitstring specifier ~ts", ['Elixir.Macro':to_string(Expr)]);
@@ -101,8 +101,8 @@ handle_unknown_bit_info(Meta, { _, ExprMeta, _ } = Expr, T, Size, Types, E) ->
%% Translation
-has_size({ bin, _, Elements }) ->
- not lists:any(fun({ bin_element, _Line, _Expr, Size, Types }) ->
+has_size({bin, _, Elements}) ->
+ not lists:any(fun({bin_element, _Line, _Expr, Size, Types}) ->
(Types /= default) andalso (Size == default) andalso
lists:any(fun(X) -> lists:member(X, Types) end,
[bits, bytes, bitstring, binary])
@@ -117,14 +117,14 @@ translate(Meta, Args, S) ->
end.
build_bitstr(Fun, Exprs, Meta, S) ->
- { Final, FinalS } = build_bitstr_each(Fun, Exprs, Meta, S, []),
- { { bin, ?line(Meta), lists:reverse(Final) }, FinalS }.
+ {Final, FinalS} = build_bitstr_each(Fun, Exprs, Meta, S, []),
+ {{bin, ?line(Meta), lists:reverse(Final)}, FinalS}.
build_bitstr_each(_Fun, [], _Meta, S, Acc) ->
- { Acc, S };
+ {Acc, S};
build_bitstr_each(Fun, [{'::',_,[H,V]}|T], Meta, S, Acc) ->
- { Size, Types } = extract_bit_info(Meta, V, S#elixir_scope{context=nil}),
+ {Size, Types} = extract_bit_info(Meta, V, S#elixir_scope{context=nil}),
build_bitstr_each(Fun, T, Meta, S, Acc, H, Size, Types);
build_bitstr_each(Fun, [H|T], Meta, S, Acc) ->
@@ -136,11 +136,11 @@ build_bitstr_each(Fun, T, Meta, S, Acc, H, default, Types) when is_binary(H) ->
true ->
%% See explanation in elixir_utils:elixir_to_erl/1 to know
%% why we can simply convert the binary to a list.
- { bin_element, ?line(Meta), { string, 0, binary_to_list(H) }, default, default };
+ {bin_element, ?line(Meta), {string, 0, binary_to_list(H)}, default, default};
false ->
case types_require_conversion(Types) of
true ->
- { bin_element, ?line(Meta), { string, 0, elixir_utils:characters_to_list(H) }, default, Types };
+ {bin_element, ?line(Meta), {string, 0, elixir_utils:characters_to_list(H)}, default, Types};
false ->
elixir_errors:compile_error(Meta, S#elixir_scope.file, "invalid types for literal string in <<>>. "
"Accepted types are: little, big, utf8, utf16, utf32, bits, bytes, binary, bitstring")
@@ -157,16 +157,16 @@ build_bitstr_each(_Fun, _T, Meta, S, _Acc, H, _Size, _Types) when is_list(H); is
['Elixir.Macro':to_string(H)]);
build_bitstr_each(Fun, T, Meta, S, Acc, H, Size, Types) ->
- { Expr, NS } = Fun(H, S),
+ {Expr, NS} = Fun(H, S),
case Expr of
- { bin, _, Elements } ->
+ {bin, _, Elements} ->
case (Size == default) andalso types_allow_splice(Types, Elements) of
true -> build_bitstr_each(Fun, T, Meta, NS, lists:reverse(Elements) ++ Acc);
- false -> build_bitstr_each(Fun, T, Meta, NS, [{ bin_element, ?line(Meta), Expr, Size, Types }|Acc])
+ false -> build_bitstr_each(Fun, T, Meta, NS, [{bin_element, ?line(Meta), Expr, Size, Types}|Acc])
end;
_ ->
- build_bitstr_each(Fun, T, Meta, NS, [{ bin_element, ?line(Meta), Expr, Size, Types }|Acc])
+ build_bitstr_each(Fun, T, Meta, NS, [{bin_element, ?line(Meta), Expr, Size, Types}|Acc])
end.
types_require_conversion([End|T]) when End == little; End == big -> types_require_conversion(T);
@@ -201,23 +201,23 @@ unit_size([], Guess) -> Guess.
%% Extra bitstring specifiers
-extract_bit_info(Meta, [{ size, _, [Arg] }|T], S) ->
+extract_bit_info(Meta, [{size, _, [Arg]}|T], S) ->
case elixir_translator:translate(Arg, S) of
- { { Kind, _, _ } = Size, _ } when Kind == integer; Kind == var ->
- { Size, extract_bit_type(Meta, T, S) };
+ {{Kind, _, _} = Size, _} when Kind == integer; Kind == var ->
+ {Size, extract_bit_type(Meta, T, S)};
_ ->
elixir_errors:compile_error(Meta, S#elixir_scope.file,
"size in bitstring expects an integer or a variable as argument, got: ~ts", ['Elixir.Macro':to_string(Arg)])
end;
extract_bit_info(Meta, T, S) ->
- { default, extract_bit_type(Meta, T, S) }.
+ {default, extract_bit_type(Meta, T, S)}.
-extract_bit_type(Meta, [{ unit, _, [Arg] }|T], S) when is_integer(Arg) ->
- [{ unit, Arg }|extract_bit_type(Meta, T, S)];
-extract_bit_type(Meta, [{ unit, _, [Arg] }|_], S) ->
+extract_bit_type(Meta, [{unit, _, [Arg]}|T], S) when is_integer(Arg) ->
+ [{unit, Arg}|extract_bit_type(Meta, T, S)];
+extract_bit_type(Meta, [{unit, _, [Arg]}|_], S) ->
elixir_errors:compile_error(Meta, S#elixir_scope.file,
"unit in bitstring expects an integer as argument, got: ~ts", ['Elixir.Macro':to_string(Arg)]);
-extract_bit_type(Meta, [{ Other, _, [] }|T], S) ->
+extract_bit_type(Meta, [{Other, _, []}|T], S) ->
[Other|extract_bit_type(Meta, T, S)];
extract_bit_type(_Meta, [], _S) ->
[].
diff --git a/lib/elixir/src/elixir_bootstrap.erl b/lib/elixir/src/elixir_bootstrap.erl
index f1bf000ed..97cf07fb2 100644
--- a/lib/elixir/src/elixir_bootstrap.erl
+++ b/lib/elixir/src/elixir_bootstrap.erl
@@ -19,14 +19,14 @@
'MACRO-defmacrop'(Caller, Call, Expr) -> define(Caller, defmacrop, Call, Expr).
'MACRO-defmodule'(_Caller, Alias, [{do,Block}]) ->
- { Escaped, _ } = elixir_quote:escape(Block, false),
+ {Escaped, _} = elixir_quote:escape(Block, false),
Args = [Alias, Escaped, [], env()],
- { { '.', [], [elixir_module, compile] }, [], Args }.
+ {{'.', [], [elixir_module, compile]}, [], Args}.
'__info__'(functions) ->
[];
'__info__'(macros) ->
- [{ '@', 1 },
+ [{'@', 1},
{def,1},
{def,2},
{defmacro,1},
@@ -36,16 +36,16 @@
{defp,2}].
define({Line,E}, Kind, Call, Expr) ->
- { EscapedCall, UC } = elixir_quote:escape(Call, true),
- { EscapedExpr, UE } = elixir_quote:escape(Expr, true),
+ {EscapedCall, UC} = elixir_quote:escape(Call, true),
+ {EscapedExpr, UE} = elixir_quote:escape(Expr, true),
Args = [Line, Kind, not(UC or UE), EscapedCall, EscapedExpr, elixir_locals:cache_env(E)],
- { { '.', [], [elixir_def, store_definition] }, [], Args }.
+ {{'.', [], [elixir_def, store_definition]}, [], Args}.
unless_loaded(Fun, Args, Callback) ->
case code:is_loaded(?kernel) of
- { _, _} -> apply(?kernel, Fun, Args);
+ {_, _} -> apply(?kernel, Fun, Args);
false -> Callback()
end.
env() ->
- { '__ENV__', [], nil }. \ No newline at end of file
+ {'__ENV__', [], nil}. \ No newline at end of file
diff --git a/lib/elixir/src/elixir_clauses.erl b/lib/elixir/src/elixir_clauses.erl
index f0ae1e712..5d8952943 100644
--- a/lib/elixir/src/elixir_clauses.erl
+++ b/lib/elixir/src/elixir_clauses.erl
@@ -11,9 +11,9 @@ get_pairs(Key, Clauses) ->
get_pairs(Key, Clauses, false).
get_pairs(Key, Clauses, AllowNil) ->
case lists:keyfind(Key, 1, Clauses) of
- { Key, Pairs } when is_list(Pairs) ->
- [{ Key, Meta, Left, Right } || { '->', Meta, [Left, Right] } <- Pairs];
- { Key, nil } when AllowNil ->
+ {Key, Pairs} when is_list(Pairs) ->
+ [{Key, Meta, Left, Right} || {'->', Meta, [Left, Right]} <- Pairs];
+ {Key, nil} when AllowNil ->
[];
false ->
[]
@@ -23,17 +23,17 @@ get_pairs(Key, Clauses, AllowNil) ->
match(Fun, Args, #elixir_scope{context=Context, match_vars=MatchVars,
backup_vars=BackupVars, vars=Vars} = S) when Context /= match ->
- { Result, NewS } = match(Fun, Args, S#elixir_scope{context=match,
+ {Result, NewS} = match(Fun, Args, S#elixir_scope{context=match,
match_vars=ordsets:new(), backup_vars=Vars}),
- { Result, NewS#elixir_scope{context=Context,
- match_vars=MatchVars, backup_vars=BackupVars} };
+ {Result, NewS#elixir_scope{context=Context,
+ match_vars=MatchVars, backup_vars=BackupVars}};
match(Fun, Args, S) -> Fun(Args, S).
%% Translate clauses with args, guards and expressions
clause(Line, Fun, Args, Expr, Guards, Return, S) when is_integer(Line) ->
- { TArgs, SA } = match(Fun, Args, S#elixir_scope{extra_guards=[]}),
- { TExpr, SE } = elixir_translator:translate_block(Expr, Return, SA#elixir_scope{extra_guards=nil}),
+ {TArgs, SA} = match(Fun, Args, S#elixir_scope{extra_guards=[]}),
+ {TExpr, SE} = elixir_translator:translate_block(Expr, Return, SA#elixir_scope{extra_guards=nil}),
SG = SA#elixir_scope{context=guard, extra_guards=nil},
Extra = SA#elixir_scope.extra_guards,
@@ -43,46 +43,46 @@ clause(Line, Fun, Args, Expr, Guards, Return, S) when is_integer(Line) ->
_ -> [translate_guard(Line, Guard, Extra, SG) || Guard <- Guards]
end,
- { { clause, Line, TArgs, TGuards, unblock(TExpr) }, SE }.
+ {{clause, Line, TArgs, TGuards, unblock(TExpr)}, SE}.
% Translate/Extract guards from the given expression.
translate_guard(Line, Guard, Extra, S) ->
[element(1, elixir_translator:translate(elixir_quote:linify(Line, Guard), S))|Extra].
-extract_guards({ 'when', _, [Left, Right] }) -> { Left, extract_or_guards(Right) };
-extract_guards(Else) -> { Else, [] }.
+extract_guards({'when', _, [Left, Right]}) -> {Left, extract_or_guards(Right)};
+extract_guards(Else) -> {Else, []}.
-extract_or_guards({ 'when', _, [Left, Right] }) -> [Left|extract_or_guards(Right)];
+extract_or_guards({'when', _, [Left, Right]}) -> [Left|extract_or_guards(Right)];
extract_or_guards(Term) -> [Term].
% Extract guards when multiple left side args are allowed.
-extract_splat_guards([{ 'when', _, [_,_|_] = Args }]) ->
- { Left, Right } = elixir_utils:split_last(Args),
- { Left, extract_or_guards(Right) };
+extract_splat_guards([{'when', _, [_,_|_] = Args}]) ->
+ {Left, Right} = elixir_utils:split_last(Args),
+ {Left, extract_or_guards(Right)};
extract_splat_guards(Else) ->
- { Else, [] }.
+ {Else, []}.
% Function for translating macros with match style like case and receive.
clauses(Meta, Clauses, Return, #elixir_scope{export_vars=CV} = S) ->
- { TC, TS } = do_clauses(Meta, Clauses, Return, S#elixir_scope{export_vars=[]}),
- { TC, TS#elixir_scope{export_vars=elixir_scope:merge_opt_vars(CV, TS#elixir_scope.export_vars)} }.
+ {TC, TS} = do_clauses(Meta, Clauses, Return, S#elixir_scope{export_vars=[]}),
+ {TC, TS#elixir_scope{export_vars=elixir_scope:merge_opt_vars(CV, TS#elixir_scope.export_vars)}}.
do_clauses(_Meta, [], _Return, S) ->
- { [], S };
+ {[], S};
do_clauses(Meta, DecoupledClauses, Return, S) ->
% Transform tree just passing the variables counter forward
% and storing variables defined inside each clause.
Transformer = fun(X, {SAcc, VAcc}) ->
- { TX, TS } = each_clause(X, Return, SAcc),
- { TX,
- { elixir_scope:mergec(S, TS), [TS#elixir_scope.export_vars|VAcc] } }
+ {TX, TS} = each_clause(X, Return, SAcc),
+ {TX,
+ {elixir_scope:mergec(S, TS), [TS#elixir_scope.export_vars|VAcc]}}
end,
- { TClauses, { TS, ReverseCV } } =
+ {TClauses, {TS, ReverseCV}} =
lists:mapfoldl(Transformer, {S, []}, DecoupledClauses),
% Now get all the variables defined inside each clause
@@ -94,7 +94,7 @@ do_clauses(Meta, DecoupledClauses, Return, S) ->
% a list of tuples where the first element is the variable name,
% the second one is the new pointer to the variable and the third
% is the old pointer.
- { FinalVars, FS } = lists:mapfoldl(fun({ Key, Val }, Acc) ->
+ {FinalVars, FS} = lists:mapfoldl(fun({Key, Val}, Acc) ->
normalize_vars(Key, Val, Acc)
end, TS, AllVars),
@@ -104,9 +104,9 @@ do_clauses(Meta, DecoupledClauses, Return, S) ->
expand_clauses(Line, [Clause|T], [ClauseVars|V], FinalVars, Acc, S) ->
case generate_match_vars(FinalVars, ClauseVars, [], []) of
- { [], [] } ->
+ {[], []} ->
expand_clauses(Line, T, V, FinalVars, [Clause|Acc], S);
- { Left, Right } ->
+ {Left, Right} ->
MatchExpr = generate_match(Line, Left, Right),
ClauseExprs = element(5, Clause),
[Final|RawClauseExprs] = lists:reverse(ClauseExprs),
@@ -114,19 +114,19 @@ expand_clauses(Line, [Clause|T], [ClauseVars|V], FinalVars, Acc, S) ->
% If the last sentence has a match clause, we need to assign its value
% in the variable list. If not, we insert the variable list before the
% final clause in order to keep it tail call optimized.
- { FinalClauseExprs, FS } = case has_match_tuple(Final) of
+ {FinalClauseExprs, FS} = case has_match_tuple(Final) of
true ->
case Final of
- { match, _, { var, _, UserVarName } = UserVar, _ } when UserVarName /= '_' ->
- { [UserVar,MatchExpr,Final|RawClauseExprs], S };
+ {match, _, {var, _, UserVarName} = UserVar, _} when UserVarName /= '_' ->
+ {[UserVar,MatchExpr,Final|RawClauseExprs], S};
_ ->
- { VarName, _, SS } = elixir_scope:build_var('_', S),
- StorageVar = { var, Line, VarName },
- StorageExpr = { match, Line, StorageVar, Final },
- { [StorageVar,MatchExpr,StorageExpr|RawClauseExprs], SS }
+ {VarName, _, SS} = elixir_scope:build_var('_', S),
+ StorageVar = {var, Line, VarName},
+ StorageExpr = {match, Line, StorageVar, Final},
+ {[StorageVar,MatchExpr,StorageExpr|RawClauseExprs], SS}
end;
false ->
- { [Final,MatchExpr|RawClauseExprs], S }
+ {[Final,MatchExpr|RawClauseExprs], S}
end,
FinalClause = setelement(5, Clause, lists:reverse(FinalClauseExprs)),
@@ -134,22 +134,22 @@ expand_clauses(Line, [Clause|T], [ClauseVars|V], FinalVars, Acc, S) ->
end;
expand_clauses(_Line, [], [], _FinalVars, Acc, S) ->
- { lists:reverse(Acc), S }.
+ {lists:reverse(Acc), S}.
% Handle each key/value clause pair and translate them accordingly.
-each_clause({ do, Meta, [Condition], Expr }, Return, S) ->
- { Arg, Guards } = extract_guards(Condition),
+each_clause({do, Meta, [Condition], Expr}, Return, S) ->
+ {Arg, Guards} = extract_guards(Condition),
clause(?line(Meta), fun elixir_translator:translate_args/2, [Arg], Expr, Guards, Return, S);
-each_clause({ else, Meta, [Condition], Expr }, Return, S) ->
- { Arg, Guards } = extract_guards(Condition),
+each_clause({else, Meta, [Condition], Expr}, Return, S) ->
+ {Arg, Guards} = extract_guards(Condition),
clause(?line(Meta), fun elixir_translator:translate_args/2, [Arg], Expr, Guards, Return, S);
-each_clause({ 'after', Meta, [Condition], Expr }, Return, S) ->
- { TCondition, SC } = elixir_translator:translate(Condition, S),
- { TExpr, SB } = elixir_translator:translate_block(Expr, Return, SC),
- { { clause, ?line(Meta), [TCondition], [], unblock(TExpr) }, SB }.
+each_clause({'after', Meta, [Condition], Expr}, Return, S) ->
+ {TCondition, SC} = elixir_translator:translate(Condition, S),
+ {TExpr, SB} = elixir_translator:translate_block(Expr, Return, SC),
+ {{clause, ?line(Meta), [TCondition], [], unblock(TExpr)}, SB}.
% Check if the given expression is a match tuple.
% This is a small optimization to allow us to change
@@ -163,7 +163,7 @@ has_match_tuple({'case', _, _, _}) ->
true;
has_match_tuple({match, _, _, _}) ->
true;
-has_match_tuple({'fun', _, { clauses, _ }}) ->
+has_match_tuple({'fun', _, {clauses, _}}) ->
false;
has_match_tuple(H) when is_tuple(H) ->
has_match_tuple(tuple_to_list(H));
@@ -179,39 +179,39 @@ normalize_vars(Key, Value, #elixir_scope{vars=Vars,export_vars=ClauseVars} = S)
VS = S#elixir_scope{
vars=orddict:store(Key, Value, Vars),
export_vars=orddict:store(Key, Value, ClauseVars)
- },
+ },
Expr = case orddict:find(Key, Vars) of
- { ok, { PreValue, _ } } -> { var, 0, PreValue };
- error -> { atom, 0, nil }
+ {ok, {PreValue, _}} -> {var, 0, PreValue};
+ error -> {atom, 0, nil}
end,
- { { Key, Value, Expr }, VS }.
+ {{Key, Value, Expr}, VS}.
% Generate match vars by checking if they were updated
% or not and assigning the previous value.
-generate_match_vars([{ Key, Value, Expr }|T], ClauseVars, Left, Right) ->
+generate_match_vars([{Key, Value, Expr}|T], ClauseVars, Left, Right) ->
case orddict:find(Key, ClauseVars) of
- { ok, Value } ->
+ {ok, Value} ->
generate_match_vars(T, ClauseVars, Left, Right);
- { ok, Clause } ->
+ {ok, Clause} ->
generate_match_vars(T, ClauseVars,
- [{ var, 0, element(1, Value) }|Left],
- [{ var, 0, element(1, Clause) }|Right]);
+ [{var, 0, element(1, Value)}|Left],
+ [{var, 0, element(1, Clause)}|Right]);
error ->
generate_match_vars(T, ClauseVars,
- [{ var, 0, element(1, Value) }|Left], [Expr|Right])
+ [{var, 0, element(1, Value)}|Left], [Expr|Right])
end;
generate_match_vars([], _ClauseVars, Left, Right) ->
- { Left, Right }.
+ {Left, Right}.
generate_match(Line, [Left], [Right]) ->
- { match, Line, Left, Right };
+ {match, Line, Left, Right};
generate_match(Line, LeftVars, RightVars) ->
- { match, Line, { tuple, Line, LeftVars }, { tuple, Line, RightVars } }.
+ {match, Line, {tuple, Line, LeftVars}, {tuple, Line, RightVars}}.
-unblock({ 'block', _, Exprs }) -> Exprs;
+unblock({'block', _, Exprs}) -> Exprs;
unblock(Exprs) -> [Exprs].
diff --git a/lib/elixir/src/elixir_code_server.erl b/lib/elixir/src/elixir_code_server.erl
index 0f2bacb8a..1a1f5e08b 100644
--- a/lib/elixir/src/elixir_code_server.erl
+++ b/lib/elixir/src/elixir_code_server.erl
@@ -30,121 +30,121 @@ init(ok) ->
code:ensure_loaded('Elixir.Macro.Env'),
code:ensure_loaded('Elixir.Module.LocalsTracker'),
code:ensure_loaded('Elixir.Kernel.LexicalTracker'),
- { ok, #elixir_code_server{} }.
+ {ok, #elixir_code_server{}}.
-handle_call({ acquire, Path }, From, Config) ->
+handle_call({acquire, Path}, From, Config) ->
Current = Config#elixir_code_server.loaded,
case orddict:find(Path, Current) of
- { ok, true } ->
- { reply, loaded, Config };
- { ok, { Ref, List } } when is_list(List), is_reference(Ref) ->
- Queued = orddict:store(Path, { Ref, [From|List] }, Current),
- { reply, { queued, Ref }, Config#elixir_code_server{loaded=Queued} };
+ {ok, true} ->
+ {reply, loaded, Config};
+ {ok, {Ref, List}} when is_list(List), is_reference(Ref) ->
+ Queued = orddict:store(Path, {Ref, [From|List]}, Current),
+ {reply, {queued, Ref}, Config#elixir_code_server{loaded=Queued}};
error ->
- Queued = orddict:store(Path, { make_ref(), [] }, Current),
- { reply, proceed, Config#elixir_code_server{loaded=Queued} }
+ Queued = orddict:store(Path, {make_ref(), []}, Current),
+ {reply, proceed, Config#elixir_code_server{loaded=Queued}}
end;
handle_call(loaded, _From, Config) ->
- { reply, [F || { F, true } <- Config#elixir_code_server.loaded], Config };
+ {reply, [F || {F, true} <- Config#elixir_code_server.loaded], Config};
handle_call(at_exit, _From, Config) ->
- { reply, Config#elixir_code_server.at_exit, Config };
+ {reply, Config#elixir_code_server.at_exit, Config};
handle_call(flush_at_exit, _From, Config) ->
- { reply, Config#elixir_code_server.at_exit, Config#elixir_code_server{at_exit=[]} };
+ {reply, Config#elixir_code_server.at_exit, Config#elixir_code_server{at_exit=[]}};
handle_call(argv, _From, Config) ->
- { reply, Config#elixir_code_server.argv, Config };
+ {reply, Config#elixir_code_server.argv, Config};
handle_call(compiler_options, _From, Config) ->
- { reply, Config#elixir_code_server.compiler_options, Config };
+ {reply, Config#elixir_code_server.compiler_options, Config};
-handle_call({ compilation_status, CompilerPid }, _From, Config) ->
+handle_call({compilation_status, CompilerPid}, _From, Config) ->
CompilationStatusList = Config#elixir_code_server.compilation_status,
CompilationStatusListNew = orddict:erase(CompilerPid, CompilationStatusList),
CompilationStatus = orddict:fetch(CompilerPid, CompilationStatusList),
- { reply, CompilationStatus, Config#elixir_code_server{compilation_status=CompilationStatusListNew} };
+ {reply, CompilationStatus, Config#elixir_code_server{compilation_status=CompilationStatusListNew}};
handle_call(retrieve_module_name, _From, Config) ->
case Config#elixir_code_server.pool of
- { [H|T], Counter } ->
- { reply, module_tuple(H), Config#elixir_code_server{pool={T,Counter}} };
- { [], Counter } ->
- { reply, module_tuple(Counter), Config#elixir_code_server{pool={[],Counter+1}} }
+ {[H|T], Counter} ->
+ {reply, module_tuple(H), Config#elixir_code_server{pool={T,Counter}}};
+ {[], Counter} ->
+ {reply, module_tuple(Counter), Config#elixir_code_server{pool={[],Counter+1}}}
end;
handle_call(erl_compiler_options, _From, Config) ->
case Config#elixir_code_server.erl_compiler_options of
nil ->
Opts = erl_compiler_options(),
- { reply, Opts, Config#elixir_code_server{erl_compiler_options=Opts} };
+ {reply, Opts, Config#elixir_code_server{erl_compiler_options=Opts}};
Opts ->
- { reply, Opts, Config }
+ {reply, Opts, Config}
end;
handle_call(Request, _From, Config) ->
- { stop, { badcall, Request }, Config }.
+ {stop, {badcall, Request}, Config}.
-handle_cast({ at_exit, AtExit }, Config) ->
- { noreply, Config#elixir_code_server{at_exit=[AtExit|Config#elixir_code_server.at_exit]} };
+handle_cast({at_exit, AtExit}, Config) ->
+ {noreply, Config#elixir_code_server{at_exit=[AtExit|Config#elixir_code_server.at_exit]}};
-handle_cast({ argv, Argv }, Config) ->
- { noreply, Config#elixir_code_server{argv=Argv} };
+handle_cast({argv, Argv}, Config) ->
+ {noreply, Config#elixir_code_server{argv=Argv}};
-handle_cast({ compiler_options, Options }, Config) ->
+handle_cast({compiler_options, Options}, Config) ->
Final = orddict:merge(fun(_,_,V) -> V end, Config#elixir_code_server.compiler_options, Options),
- { noreply, Config#elixir_code_server{compiler_options=Final} };
+ {noreply, Config#elixir_code_server{compiler_options=Final}};
-handle_cast({ register_warning, CompilerPid }, Config) ->
+handle_cast({register_warning, CompilerPid}, Config) ->
CompilationStatusCurrent = Config#elixir_code_server.compilation_status,
CompilationStatusNew = orddict:store(CompilerPid, error, CompilationStatusCurrent),
case orddict:find(warnings_as_errors, Config#elixir_code_server.compiler_options) of
- { ok, true } -> { noreply, Config#elixir_code_server{compilation_status=CompilationStatusNew} };
- _ -> { noreply, Config }
+ {ok, true} -> {noreply, Config#elixir_code_server{compilation_status=CompilationStatusNew}};
+ _ -> {noreply, Config}
end;
-handle_cast({ reset_warnings, CompilerPid }, Config) ->
+handle_cast({reset_warnings, CompilerPid}, Config) ->
CompilationStatusCurrent = Config#elixir_code_server.compilation_status,
CompilationStatusNew = orddict:store(CompilerPid, ok, CompilationStatusCurrent),
- { noreply, Config#elixir_code_server{compilation_status=CompilationStatusNew} };
+ {noreply, Config#elixir_code_server{compilation_status=CompilationStatusNew}};
-handle_cast({ loaded, Path }, Config) ->
+handle_cast({loaded, Path}, Config) ->
Current = Config#elixir_code_server.loaded,
case orddict:find(Path, Current) of
- { ok, true } ->
- { noreply, Config };
- { ok, { Ref, List } } when is_list(List), is_reference(Ref) ->
- [Pid ! { elixir_code_server, Ref, loaded } || { Pid, _Tag } <- lists:reverse(List)],
+ {ok, true} ->
+ {noreply, Config};
+ {ok, {Ref, List}} when is_list(List), is_reference(Ref) ->
+ [Pid ! {elixir_code_server, Ref, loaded} || {Pid, _Tag} <- lists:reverse(List)],
Done = orddict:store(Path, true, Current),
- { noreply, Config#elixir_code_server{loaded=Done} };
+ {noreply, Config#elixir_code_server{loaded=Done}};
error ->
Done = orddict:store(Path, true, Current),
- { noreply, Config#elixir_code_server{loaded=Done} }
+ {noreply, Config#elixir_code_server{loaded=Done}}
end;
-handle_cast({ unload_files, Files }, Config) ->
+handle_cast({unload_files, Files}, Config) ->
Current = Config#elixir_code_server.loaded,
Unloaded = lists:foldl(fun(File, Acc) -> orddict:erase(File, Acc) end, Current, Files),
- { noreply, Config#elixir_code_server{loaded=Unloaded} };
+ {noreply, Config#elixir_code_server{loaded=Unloaded}};
-handle_cast({ return_module_name, H }, #elixir_code_server{pool={T,Counter}} = Config) ->
- { noreply, Config#elixir_code_server{pool={[H|T],Counter}} };
+handle_cast({return_module_name, H}, #elixir_code_server{pool={T,Counter}} = Config) ->
+ {noreply, Config#elixir_code_server{pool={[H|T],Counter}}};
handle_cast(Request, Config) ->
- { stop, { badcast, Request }, Config }.
+ {stop, {badcast, Request}, Config}.
handle_info(_Request, Config) ->
- { noreply, Config }.
+ {noreply, Config}.
terminate(_Reason, _Config) ->
ok.
code_change(_Old, Config, _Extra) ->
- { ok, Config }.
+ {ok, Config}.
module_tuple(I) ->
- { list_to_atom("elixir_compiler_" ++ integer_to_list(I)), I }.
+ {list_to_atom("elixir_compiler_" ++ integer_to_list(I)), I}.
erl_compiler_options() ->
Key = "ERL_COMPILER_OPTIONS",
diff --git a/lib/elixir/src/elixir_compiler.erl b/lib/elixir/src/elixir_compiler.erl
index 1d4649595..21ae4a05c 100644
--- a/lib/elixir/src/elixir_compiler.erl
+++ b/lib/elixir/src/elixir_compiler.erl
@@ -9,7 +9,7 @@ get_opt(Key) ->
Dict = elixir_code_server:call(compiler_options),
case lists:keyfind(Key, 1, Dict) of
false -> false;
- { Key, Value } -> Value
+ {Key, Value} -> Value
end.
%% Compilation entry points.
@@ -35,7 +35,7 @@ quoted(Forms, File) when is_binary(File) ->
file(Relative) when is_binary(Relative) ->
File = filename:absname(Relative),
- { ok, Bin } = file:read_file(File),
+ {ok, Bin} = file:read_file(File),
string(elixir_utils:characters_to_list(Bin), File).
file_to_path(File, Path) when is_binary(File), is_binary(Path) ->
@@ -52,21 +52,21 @@ eval_forms(Forms, Vars, E) ->
end.
eval_compilation(Forms, Vars, E) ->
- Binding = [{ Key, Value } || { _Name, _Kind, Key, Value } <- Vars],
- { Result, _Binding, EE, _S } = elixir:eval_forms(Forms, Binding, E),
- { Result, EE }.
+ Binding = [{Key, Value} || {_Name, _Kind, Key, Value} <- Vars],
+ {Result, _Binding, EE, _S} = elixir:eval_forms(Forms, Binding, E),
+ {Result, EE}.
code_loading_compilation(Forms, Vars, #elixir_env{line=Line} = E) ->
- Dict = [{ { Name, Kind }, { Value, 0 } } || { Name, Kind, Value, _ } <- Vars],
+ Dict = [{{Name, Kind}, {Value, 0}} || {Name, Kind, Value, _} <- Vars],
S = elixir_env:env_to_scope_with_vars(E, Dict),
- { Expr, EE, _S } = elixir:quoted_to_erl(Forms, E, S),
+ {Expr, EE, _S} = elixir:quoted_to_erl(Forms, E, S),
- { Module, I } = retrieve_module_name(),
+ {Module, I} = retrieve_module_name(),
Fun = code_fun(E#elixir_env.module),
Form = code_mod(Fun, Expr, Line, E#elixir_env.file, Module, Vars),
- Args = list_to_tuple([V || { _, _, _, V } <- Vars]),
+ Args = list_to_tuple([V || {_, _, _, V} <- Vars]),
- %% Pass { native, false } to speed up bootstrap
+ %% Pass {native, false} to speed up bootstrap
%% process when native is set to true
module(Form, E#elixir_env.file, [{native,false}], true, fun(_, Binary) ->
%% If we have labeled locals, anonymous functions
@@ -74,7 +74,7 @@ code_loading_compilation(Forms, Vars, #elixir_env{line=Line} = E) ->
%% module
Purgeable =
case beam_lib:chunks(Binary, [labeled_locals]) of
- { ok, { _, [{ labeled_locals, []}] } } -> true;
+ {ok, {_, [{labeled_locals, []}]}} -> true;
_ -> false
end,
dispatch_loaded(Module, Fun, Args, Purgeable, I, EE)
@@ -89,34 +89,34 @@ dispatch_loaded(Module, Fun, Args, Purgeable, I, E) ->
true ->
ok
end,
- { Res, E }.
+ {Res, E}.
code_fun(nil) -> '__FILE__';
code_fun(_) -> '__MODULE__'.
code_mod(Fun, Expr, Line, File, Module, Vars) when is_binary(File), is_integer(Line) ->
- Tuple = { tuple, Line, [{ var, Line, K } || { _, _, K, _ } <- Vars] },
+ Tuple = {tuple, Line, [{var, Line, K} || {_, _, K, _} <- Vars]},
Relative = elixir_utils:relative_to_cwd(File),
[
- { attribute, Line, file, { elixir_utils:characters_to_list(Relative), 1 } },
- { attribute, Line, module, Module },
- { attribute, Line, export, [{ Fun, 1 }, { '__RELATIVE__', 0 }] },
- { function, Line, Fun, 1, [
- { clause, Line, [Tuple], [], [Expr] }
- ] },
- { function, Line, '__RELATIVE__', 0, [
- { clause, Line, [], [], [elixir_utils:elixir_to_erl(Relative)] }
- ] }
+ {attribute, Line, file, {elixir_utils:characters_to_list(Relative), 1}},
+ {attribute, Line, module, Module},
+ {attribute, Line, export, [{Fun, 1}, {'__RELATIVE__', 0}]},
+ {function, Line, Fun, 1, [
+ {clause, Line, [Tuple], [], [Expr]}
+ ]},
+ {function, Line, '__RELATIVE__', 0, [
+ {clause, Line, [], [], [elixir_utils:elixir_to_erl(Relative)]}
+ ]}
].
retrieve_module_name() ->
elixir_code_server:call(retrieve_module_name).
return_module_name(I) ->
- elixir_code_server:cast({ return_module_name, I }).
+ elixir_code_server:cast({return_module_name, I}).
-allows_fast_compilation({ '__block__', _, Exprs }) ->
+allows_fast_compilation({'__block__', _, Exprs}) ->
lists:all(fun allows_fast_compilation/1, Exprs);
allows_fast_compilation({defmodule,_,_}) -> true;
allows_fast_compilation(_) -> false.
@@ -154,7 +154,7 @@ module(Forms, File, RawOptions, Bootstrap, Callback) when
core() ->
application:start(elixir),
- elixir_code_server:cast({ compiler_options, [{docs,false},{internal,true}] }),
+ elixir_code_server:cast({compiler_options, [{docs,false},{internal,true}]}),
[core_file(File) || File <- core_main()].
core_file(File) ->
@@ -210,7 +210,7 @@ binary_to_path({ModuleName, Binary}, CompilePath) ->
%% ERROR HANDLING
format_errors([]) ->
- exit({ nocompile, "compilation failed but no error was raised" });
+ exit({nocompile, "compilation failed but no error was raised"});
format_errors(Errors) ->
lists:foreach(fun ({File, Each}) ->
diff --git a/lib/elixir/src/elixir_counter.erl b/lib/elixir/src/elixir_counter.erl
index 78be766a6..35cfd1603 100644
--- a/lib/elixir/src/elixir_counter.erl
+++ b/lib/elixir/src/elixir_counter.erl
@@ -13,24 +13,24 @@ start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, 0, []).
init(Counter) ->
- { ok, Counter }.
+ {ok, Counter}.
handle_call(next, _From, Counter) ->
- { reply, Counter, bump(Counter) };
+ {reply, Counter, bump(Counter)};
handle_call(Request, _From, Counter) ->
- { stop, { badcall, Request }, Counter }.
+ {stop, {badcall, Request}, Counter}.
handle_cast(Request, Counter) ->
- { stop, { badcast, Request }, Counter }.
+ {stop, {badcast, Request}, Counter}.
handle_info(_Request, Counter) ->
- { noreply, Counter }.
+ {noreply, Counter}.
terminate(_Reason, _Counter) ->
ok.
code_change(_Old, Counter, _Extra) ->
- { ok, Counter }.
+ {ok, Counter}.
bump(Counter) when Counter < ?limit ->
Counter + 1;
diff --git a/lib/elixir/src/elixir_def.erl b/lib/elixir/src/elixir_def.erl
index 36c2edf81..40587b56d 100644
--- a/lib/elixir/src/elixir_def.erl
+++ b/lib/elixir/src/elixir_def.erl
@@ -18,8 +18,8 @@ clauses_table(Module) ->
ets:lookup_element(Module, ?clauses_attr, 2).
setup(Module) ->
- ets:insert(Module, { ?attr, ets:new(Module, [set, public]) }),
- ets:insert(Module, { ?clauses_attr, ets:new(Module, [bag, public]) }),
+ ets:insert(Module, {?attr, ets:new(Module, [set, public])}),
+ ets:insert(Module, {?clauses_attr, ets:new(Module, [bag, public])}),
reset_last(Module),
ok.
@@ -29,14 +29,14 @@ cleanup(Module) ->
%% Reset the last item. Useful when evaling code.
reset_last(Module) ->
- ets:insert(table(Module), { last, [] }).
+ ets:insert(table(Module), {last, []}).
%% Looks up a definition from the database.
lookup_definition(Module, Tuple) ->
case ets:lookup(table(Module), Tuple) of
[Result] ->
CTable = clauses_table(Module),
- { Result, [Clause || { _, Clause } <- ets:lookup(CTable, Tuple)] };
+ {Result, [Clause || {_, Clause} <- ets:lookup(CTable, Tuple)]};
_ ->
false
end.
@@ -50,25 +50,25 @@ delete_definition(Module, Tuple) ->
store_definition(Line, Kind, CheckClauses, Call, Body, Pos) ->
E = (elixir_locals:get_cached_env(Pos))#elixir_env{line=Line},
- { NameAndArgs, Guards } = elixir_clauses:extract_guards(Call),
+ {NameAndArgs, Guards} = elixir_clauses:extract_guards(Call),
- { Name, Args } = case NameAndArgs of
- { N, _, A } when is_atom(N), is_atom(A) -> { N, [] };
- { N, _, A } when is_atom(N), is_list(A) -> { N, A };
- _ -> elixir_errors:form_error(Line, E#elixir_env.file, ?MODULE, { invalid_def, Kind, NameAndArgs })
+ {Name, Args} = case NameAndArgs of
+ {N, _, A} when is_atom(N), is_atom(A) -> {N, []};
+ {N, _, A} when is_atom(N), is_list(A) -> {N, A};
+ _ -> elixir_errors:form_error(Line, E#elixir_env.file, ?MODULE, {invalid_def, Kind, NameAndArgs})
end,
%% Now that we have verified the call format,
%% extract meta information like file and context.
- { _, Meta, _ } = Call,
+ {_, Meta, _} = Call,
DoCheckClauses = (not lists:keymember(context, 1, Meta)) andalso (CheckClauses),
%% Check if there is a file information in the definition.
%% If so, we assume this come from another source and we need
%% to linify taking into account keep line numbers.
- { File, Key } = case lists:keyfind(file, 1, Meta) of
- { file, Bin } when is_binary(Bin) -> { Bin, keep };
- _ -> { nil, line }
+ {File, Key} = case lists:keyfind(file, 1, Meta) of
+ {file, Bin} when is_binary(Bin) -> {Bin, keep};
+ _ -> {nil, line}
end,
LinifyArgs = elixir_quote:linify(Line, Key, Args),
@@ -81,12 +81,12 @@ store_definition(Line, Kind, CheckClauses, Call, Body, Pos) ->
store_definition(Line, Kind, CheckClauses, Name, Args, Guards, Body, MetaFile, #elixir_env{module=Module} = ER) ->
Arity = length(Args),
- Tuple = { Name, Arity },
+ Tuple = {Name, Arity},
E = ER#elixir_env{function=Tuple},
elixir_locals:record_definition(Tuple, Kind, Module),
Location = retrieve_location(Line, MetaFile, Module),
- { Function, Defaults, Super } = translate_definition(Kind, Line, Module, Name, Args, Guards, Body, E),
+ {Function, Defaults, Super} = translate_definition(Kind, Line, Module, Name, Args, Guards, Body, E),
DefaultsLength = length(Defaults),
elixir_locals:record_defaults(Tuple, Kind, Module, DefaultsLength),
@@ -104,7 +104,7 @@ store_definition(Line, Kind, CheckClauses, Name, Args, Guards, Body, MetaFile, #
default_function_for(Kind, Name, Default)) || Default <- Defaults],
make_struct_available(Kind, Module, Name, Args),
- { Name, Arity }.
+ {Name, Arity}.
%% @on_definition
@@ -113,15 +113,15 @@ run_on_definition_callbacks(Kind, Line, Module, Name, Args, Guards, Expr, E) ->
true ->
ok;
_ ->
- Env = elixir_env:env_to_ex({ Line, E }),
+ Env = elixir_env:env_to_ex({Line, E}),
Callbacks = 'Elixir.Module':get_attribute(Module, on_definition),
- [Mod:Fun(Env, Kind, Name, Args, Guards, Expr) || { Mod, Fun } <- Callbacks]
+ [Mod:Fun(Env, Kind, Name, Args, Guards, Expr) || {Mod, Fun} <- Callbacks]
end.
make_struct_available(def, Module, '__struct__', []) ->
case erlang:get(elixir_compiler_pid) of
undefined -> ok;
- Pid -> Pid ! { struct_available, Module }
+ Pid -> Pid ! {struct_available, Module}
end;
make_struct_available(_, _, _, _) ->
ok.
@@ -134,13 +134,13 @@ retrieve_location(Line, File, Module) ->
X when X == nil; X == false ->
if
is_binary(File) ->
- { elixir_utils:characters_to_list(elixir_utils:relative_to_cwd(File)), Line };
+ {elixir_utils:characters_to_list(elixir_utils:relative_to_cwd(File)), Line};
true ->
nil
end;
X when is_binary(X) ->
'Elixir.Module':delete_attribute(Module, file),
- { elixir_utils:characters_to_list(X), 0 }
+ {elixir_utils:characters_to_list(X), 0}
end.
%% Compile super
@@ -155,49 +155,49 @@ compile_super(_Module, _, _E) -> ok.
translate_definition(Kind, Line, Module, Name, Args, Guards, Body, E) when is_integer(Line) ->
Arity = length(Args),
- { EArgs, EGuards, EBody, _ } = elixir_exp_clauses:def(fun elixir_def_defaults:expand/2,
+ {EArgs, EGuards, EBody, _} = elixir_exp_clauses:def(fun elixir_def_defaults:expand/2,
Args, Guards, expr_from_body(Line, Body), E),
%% Macros receive a special argument on invocation. Notice it does
%% not affect the arity of the stored function, but the clause
%% already contains it.
EAllArgs = case is_macro(Kind) of
- true -> [{ '_@CALLER', [{line,Line}], nil }|EArgs];
+ true -> [{'_@CALLER', [{line,Line}], nil}|EArgs];
false -> EArgs
end,
S = elixir_env:env_to_scope(E),
- { Unpacked, Defaults } = elixir_def_defaults:unpack(Kind, Name, EAllArgs, S),
- { Clauses, Super } = translate_clause(Body, Line, Kind, Unpacked, EGuards, EBody, S),
+ {Unpacked, Defaults} = elixir_def_defaults:unpack(Kind, Name, EAllArgs, S),
+ {Clauses, Super} = translate_clause(Body, Line, Kind, Unpacked, EGuards, EBody, S),
run_on_definition_callbacks(Kind, Line, Module, Name, EArgs, EGuards, EBody, E),
- Function = { function, Line, Name, Arity, Clauses },
- { Function, Defaults, Super }.
+ Function = {function, Line, Name, Arity, Clauses},
+ {Function, Defaults, Super}.
translate_clause(nil, _Line, _Kind, _Args, [], _Body, _S) ->
- { [], false };
+ {[], false};
translate_clause(nil, Line, Kind, _Args, _Guards, _Body, #elixir_scope{file=File}) ->
- elixir_errors:form_error(Line, File, ?MODULE, { missing_do, Kind });
+ elixir_errors:form_error(Line, File, ?MODULE, {missing_do, Kind});
translate_clause(_, Line, Kind, Args, Guards, Body, S) ->
- { TClause, TS } = elixir_clauses:clause(Line,
+ {TClause, TS} = elixir_clauses:clause(Line,
fun elixir_translator:translate_args/2, Args, Body, Guards, true, S),
%% Set __CALLER__ if used
FClause = case is_macro(Kind) andalso TS#elixir_scope.caller of
true ->
- FBody = { 'match', Line,
- { 'var', Line, '__CALLER__' },
- ?wrap_call(Line, elixir_env, env_to_ex, [{ var, Line, '_@CALLER' }])
- },
+ FBody = {'match', Line,
+ {'var', Line, '__CALLER__'},
+ ?wrap_call(Line, elixir_env, env_to_ex, [{var, Line, '_@CALLER'}])
+ },
setelement(5, TClause, [FBody|element(5, TClause)]);
false -> TClause
end,
- { [FClause], TS#elixir_scope.super }.
+ {[FClause], TS#elixir_scope.super}.
expr_from_body(_Line, nil) -> nil;
-expr_from_body(_Line, [{ do, Expr }]) -> Expr;
-expr_from_body(Line, Else) -> { 'try', [{line,Line}], [Else] }.
+expr_from_body(_Line, [{do, Expr}]) -> Expr;
+expr_from_body(Line, Else) -> {'try', [{line,Line}], [Else]}.
is_macro(defmacro) -> true;
is_macro(defmacrop) -> true;
@@ -214,18 +214,18 @@ unwrap_definitions(Module) ->
unwrap_definition([Fun|T], CTable, All, Exports, Private, Def, Defmacro, Functions, Tail) ->
Tuple = element(1, Fun),
- Clauses = [Clause || { _, Clause } <- ets:lookup(CTable, Tuple)],
+ Clauses = [Clause || {_, Clause} <- ets:lookup(CTable, Tuple)],
- { NewFun, NewExports, NewPrivate, NewDef, NewDefmacro } =
+ {NewFun, NewExports, NewPrivate, NewDef, NewDefmacro} =
case Clauses of
- [] -> { false, Exports, Private, Def, Defmacro };
+ [] -> {false, Exports, Private, Def, Defmacro};
_ -> unwrap_definition(element(2, Fun), Tuple, Fun, Exports, Private, Def, Defmacro)
end,
- { NewFunctions, NewTail } = case NewFun of
+ {NewFunctions, NewTail} = case NewFun of
false ->
NewAll = All,
- { Functions, Tail };
+ {Functions, Tail};
_ ->
NewAll = [Tuple|All],
function_for_stored_definition(NewFun, Clauses, Functions, Tail)
@@ -234,49 +234,49 @@ unwrap_definition([Fun|T], CTable, All, Exports, Private, Def, Defmacro, Functio
unwrap_definition(T, CTable, NewAll, NewExports, NewPrivate,
NewDef, NewDefmacro, NewFunctions, NewTail);
unwrap_definition([], _CTable, All, Exports, Private, Def, Defmacro, Functions, Tail) ->
- { All, Exports, Private, ordsets:from_list(Def),
- ordsets:from_list(Defmacro), lists:reverse(Tail ++ Functions) }.
+ {All, Exports, Private, ordsets:from_list(Def),
+ ordsets:from_list(Defmacro), lists:reverse(Tail ++ Functions)}.
unwrap_definition(def, Tuple, Fun, Exports, Private, Def, Defmacro) ->
- { Fun, [Tuple|Exports], Private, [Tuple|Def], Defmacro };
-unwrap_definition(defmacro, { Name, Arity } = Tuple, Fun, Exports, Private, Def, Defmacro) ->
- Macro = { ?elixir_macro(Name), Arity + 1 },
- { setelement(1, Fun, Macro), [Macro|Exports], Private, Def, [Tuple|Defmacro] };
+ {Fun, [Tuple|Exports], Private, [Tuple|Def], Defmacro};
+unwrap_definition(defmacro, {Name, Arity} = Tuple, Fun, Exports, Private, Def, Defmacro) ->
+ Macro = {?elixir_macro(Name), Arity + 1},
+ {setelement(1, Fun, Macro), [Macro|Exports], Private, Def, [Tuple|Defmacro]};
unwrap_definition(defp, Tuple, Fun, Exports, Private, Def, Defmacro) ->
- %% { Name, Arity }, Kind, Line, Check, Defaults
- Info = { Tuple, defp, element(3, Fun), element(5, Fun), element(7, Fun) },
- { Fun, Exports, [Info|Private], Def, Defmacro };
+ %% {Name, Arity}, Kind, Line, Check, Defaults
+ Info = {Tuple, defp, element(3, Fun), element(5, Fun), element(7, Fun)},
+ {Fun, Exports, [Info|Private], Def, Defmacro};
unwrap_definition(defmacrop, Tuple, Fun, Exports, Private, Def, Defmacro) ->
- %% { Name, Arity }, Kind, Line, Check, Defaults
- Info = { Tuple, defmacrop, element(3, Fun), element(5, Fun), element(7, Fun) },
- { false, Exports, [Info|Private], Def, Defmacro }.
+ %% {Name, Arity}, Kind, Line, Check, Defaults
+ Info = {Tuple, defmacrop, element(3, Fun), element(5, Fun), element(7, Fun)},
+ {false, Exports, [Info|Private], Def, Defmacro}.
%% Helpers
function_for_stored_definition({{Name,Arity}, _, Line, _, _, nil, _}, Clauses, Functions, Tail) ->
- { [{ function, Line, Name, Arity, Clauses }|Functions], Tail };
+ {[{function, Line, Name, Arity, Clauses}|Functions], Tail};
function_for_stored_definition({{Name,Arity}, _, Line, _, _, Location, _}, Clauses, Functions, Tail) ->
- { Functions, [
- { function, Line, Name, Arity, Clauses },
- { attribute, Line, file, Location } | Tail
- ] }.
+ {Functions, [
+ {function, Line, Name, Arity, Clauses},
+ {attribute, Line, file, Location} | Tail
+ ]}.
-default_function_for(Kind, Name, { clause, Line, Args, _Guards, _Exprs } = Clause)
+default_function_for(Kind, Name, {clause, Line, Args, _Guards, _Exprs} = Clause)
when Kind == defmacro; Kind == defmacrop ->
- { function, Line, Name, length(Args) - 1, [Clause] };
+ {function, Line, Name, length(Args) - 1, [Clause]};
-default_function_for(_, Name, { clause, Line, Args, _Guards, _Exprs } = Clause) ->
- { function, Line, Name, length(Args), [Clause] }.
+default_function_for(_, Name, {clause, Line, Args, _Guards, _Exprs} = Clause) ->
+ {function, Line, Name, length(Args), [Clause]}.
%% Store each definition in the table.
%% This function also checks and emit warnings in case
%% the kind, of the visibility of the function changes.
store_each(Check, Kind, File, Location, Table, CTable, Defaults, {function, Line, Name, Arity, Clauses}) ->
- Tuple = { Name, Arity },
+ Tuple = {Name, Arity},
case ets:lookup(Table, Tuple) of
- [{ Tuple, StoredKind, StoredLine, StoredFile, StoredCheck, StoredLocation, StoredDefaults }] ->
+ [{Tuple, StoredKind, StoredLine, StoredFile, StoredCheck, StoredLocation, StoredDefaults}] ->
FinalLine = StoredLine,
FinalLocation = StoredLocation,
FinalDefaults = max(Defaults, StoredDefaults),
@@ -289,16 +289,16 @@ store_each(Check, Kind, File, Location, Table, CTable, Defaults, {function, Line
FinalLocation = Location,
FinalDefaults = Defaults
end,
- Check andalso ets:insert(Table, { last, { Name, Arity } }),
- ets:insert(CTable, [{ Tuple, Clause } || Clause <- Clauses ]),
- ets:insert(Table, { Tuple, Kind, FinalLine, File, Check, FinalLocation, FinalDefaults }).
+ Check andalso ets:insert(Table, {last, {Name, Arity}}),
+ ets:insert(CTable, [{Tuple, Clause} || Clause <- Clauses ]),
+ ets:insert(Table, {Tuple, Kind, FinalLine, File, Check, FinalLocation, FinalDefaults}).
%% Validations
check_valid_kind(_Line, _File, _Name, _Arity, Kind, Kind) -> [];
check_valid_kind(Line, File, Name, Arity, Kind, StoredKind) ->
elixir_errors:form_error(Line, File, ?MODULE,
- { changed_kind, { Name, Arity, StoredKind, Kind } }).
+ {changed_kind, {Name, Arity, StoredKind, Kind}}).
check_valid_clause(Line, File, Name, Arity, Kind, Table, StoredLine, StoredFile) ->
case ets:lookup_element(Table, last, 2) of
@@ -306,21 +306,21 @@ check_valid_clause(Line, File, Name, Arity, Kind, Table, StoredLine, StoredFile)
[] -> [];
_ ->
Relative = elixir_utils:relative_to_cwd(StoredFile),
- elixir_errors:handle_file_warning(File, { Line, ?MODULE,
- { ungrouped_clause, { Kind, Name, Arity, StoredLine, Relative } } })
+ elixir_errors:handle_file_warning(File, {Line, ?MODULE,
+ {ungrouped_clause, {Kind, Name, Arity, StoredLine, Relative}}})
end.
check_valid_defaults(_Line, _File, _Name, _Arity, _Kind, 0, _) -> [];
check_valid_defaults(Line, File, Name, Arity, Kind, _, 0) ->
- elixir_errors:handle_file_warning(File, { Line, ?MODULE, { out_of_order_defaults, { Kind, Name, Arity } } });
+ elixir_errors:handle_file_warning(File, {Line, ?MODULE, {out_of_order_defaults, {Kind, Name, Arity}}});
check_valid_defaults(Line, File, Name, Arity, Kind, _, _) ->
- elixir_errors:form_error(Line, File, ?MODULE, { clauses_with_defaults, { Kind, Name, Arity } }).
+ elixir_errors:form_error(Line, File, ?MODULE, {clauses_with_defaults, {Kind, Name, Arity}}).
check_previous_defaults(Table, Line, Name, Arity, Kind, Defaults, E) ->
- Matches = ets:match(Table, { { Name, '$2' }, '$1', '_', '_', '_', '_', '$3' }),
+ Matches = ets:match(Table, {{Name, '$2'}, '$1', '_', '_', '_', '_', '$3'}),
[ begin
elixir_errors:form_error(Line, E#elixir_env.file, ?MODULE,
- { defs_with_defaults, Name, { Kind, Arity }, { K, A } })
+ {defs_with_defaults, Name, {Kind, Arity}, {K, A}})
end || [K, A, D] <- Matches, A /= Arity, D /= 0, defaults_conflict(A, D, Arity, Defaults)].
defaults_conflict(A, D, Arity, Defaults) ->
@@ -328,7 +328,7 @@ defaults_conflict(A, D, Arity, Defaults) ->
((A >= (Arity - Defaults)) andalso (A < Arity)).
assert_no_aliases_name(Line, '__aliases__', [Atom], #elixir_env{file=File}) when is_atom(Atom) ->
- elixir_errors:form_error(Line, File, ?MODULE, { no_alias, Atom });
+ elixir_errors:form_error(Line, File, ?MODULE, {no_alias, Atom});
assert_no_aliases_name(_Meta, _Aliases, _Args, _S) ->
ok.
@@ -338,11 +338,11 @@ assert_no_aliases_name(_Meta, _Aliases, _Args, _S) ->
format_error({no_module,{Kind,Name,Arity}}) ->
io_lib:format("cannot define function outside module, invalid scope for ~ts ~ts/~B", [Kind, Name, Arity]);
-format_error({defs_with_defaults, Name, { Kind, Arity }, { K, A } }) when Arity > A ->
+format_error({defs_with_defaults, Name, {Kind, Arity}, {K, A}}) when Arity > A ->
io_lib:format("~ts ~ts/~B defaults conflicts with ~ts ~ts/~B",
[Kind, Name, Arity, K, Name, A]);
-format_error({defs_with_defaults, Name, { Kind, Arity }, { K, A } }) when Arity < A ->
+format_error({defs_with_defaults, Name, {Kind, Arity}, {K, A}}) when Arity < A ->
io_lib:format("~ts ~ts/~B conflicts with defaults from ~ts ~ts/~B",
[Kind, Name, Arity, K, Name, A]);
diff --git a/lib/elixir/src/elixir_def_defaults.erl b/lib/elixir/src/elixir_def_defaults.erl
index 5716a4d9f..6df24fc59 100644
--- a/lib/elixir/src/elixir_def_defaults.erl
+++ b/lib/elixir/src/elixir_def_defaults.erl
@@ -5,10 +5,10 @@
expand(Args, E) ->
lists:mapfoldl(fun
- ({ '\\\\', Meta, [Left, Right] }, Acc) ->
- { ELeft, EL } = elixir_exp:expand(Left, Acc),
- { ERight, _ } = elixir_exp:expand(Right, Acc#elixir_env{context=nil}),
- { { '\\\\', Meta, [ELeft, ERight] }, EL };
+ ({'\\\\', Meta, [Left, Right]}, Acc) ->
+ {ELeft, EL} = elixir_exp:expand(Left, Acc),
+ {ERight, _} = elixir_exp:expand(Right, Acc#elixir_env{context=nil}),
+ {{'\\\\', Meta, [ELeft, ERight]}, EL};
(Left, Acc) ->
elixir_exp:expand(Left, Acc)
end, E, Args).
@@ -23,24 +23,24 @@ unpack(Kind, Name, Args, S) ->
%% clauses and a list of clauses for the default calls.
unpack_each(Kind, Name, [{'\\\\', Line, [Expr, _]}|T] = List, Acc, Clauses, S) ->
Base = build_match(Acc, Line, []),
- { Args, Invoke } = extract_defaults(List, Line, length(Base), [], []),
+ {Args, Invoke} = extract_defaults(List, Line, length(Base), [], []),
- { DefArgs, SA } = elixir_clauses:match(fun elixir_translator:translate_args/2, Base ++ Args, S),
- { DefInvoke, _ } = elixir_translator:translate_args(Base ++ Invoke, SA),
+ {DefArgs, SA} = elixir_clauses:match(fun elixir_translator:translate_args/2, Base ++ Args, S),
+ {DefInvoke, _} = elixir_translator:translate_args(Base ++ Invoke, SA),
- Call = { call, Line,
- { atom, Line, name_for_kind(Kind, Name) },
+ Call = {call, Line,
+ {atom, Line, name_for_kind(Kind, Name)},
DefInvoke
- },
+ },
- Clause = { clause, Line, DefArgs, [], [Call] },
+ Clause = {clause, Line, DefArgs, [], [Call]},
unpack_each(Kind, Name, T, [Expr|Acc], [Clause|Clauses], S);
unpack_each(Kind, Name, [H|T], Acc, Clauses, S) ->
unpack_each(Kind, Name, T, [H|Acc], Clauses, S);
unpack_each(_Kind, _Name, [], Acc, Clauses, _S) ->
- { lists:reverse(Acc), lists:reverse(Clauses) }.
+ {lists:reverse(Acc), lists:reverse(Clauses)}.
% Extract default values from args following the current default clause.
@@ -48,18 +48,18 @@ extract_defaults([{'\\\\', _, [_Expr, Default]}|T], Line, Counter, NewArgs, NewI
extract_defaults(T, Line, Counter, NewArgs, [Default|NewInvoke]);
extract_defaults([_|T], Line, Counter, NewArgs, NewInvoke) ->
- H = { ?atom_concat(["_@D", Counter]), Line, nil },
+ H = {?atom_concat(["_@D", Counter]), Line, nil},
extract_defaults(T, Line, Counter + 1, [H|NewArgs], [H|NewInvoke]);
extract_defaults([], _Line, _Counter, NewArgs, NewInvoke) ->
- { lists:reverse(NewArgs), lists:reverse(NewInvoke) }.
+ {lists:reverse(NewArgs), lists:reverse(NewInvoke)}.
% Build matches for all the previous argument until the current default clause.
build_match([], _Line, Acc) -> Acc;
build_match([_|T], Line, Acc) ->
- Var = { ?atom_concat(["_@D", length(T)]), Line, nil },
+ Var = {?atom_concat(["_@D", length(T)]), Line, nil},
build_match(T, Line, [Var|Acc]).
% Given the invoked function name based on the kind
diff --git a/lib/elixir/src/elixir_def_overridable.erl b/lib/elixir/src/elixir_def_overridable.erl
index adaee4780..fcca8839e 100644
--- a/lib/elixir/src/elixir_def_overridable.erl
+++ b/lib/elixir/src/elixir_def_overridable.erl
@@ -8,15 +8,15 @@ overridable(Module) ->
ets:lookup_element(elixir_module:data_table(Module), '__overridable', 2).
overridable(Module, Value) ->
- ets:insert(elixir_module:data_table(Module), { '__overridable', Value }).
+ ets:insert(elixir_module:data_table(Module), {'__overridable', Value}).
%% Check if an overridable function is defined.
ensure_defined(Meta, Module, Tuple, S) ->
Overridable = overridable(Module),
case orddict:find(Tuple, Overridable) of
- { ok, { _, _, _, _ } } -> ok;
- _ -> elixir_errors:form_error(Meta, S#elixir_scope.file, ?MODULE, { no_super, Module, Tuple })
+ {ok, {_, _, _, _}} -> ok;
+ _ -> elixir_errors:form_error(Meta, S#elixir_scope.file, ?MODULE, {no_super, Module, Tuple})
end.
%% Gets the name based on the function and stored overridables
@@ -24,8 +24,8 @@ ensure_defined(Meta, Module, Tuple, S) ->
name(Module, Function) ->
name(Module, Function, overridable(Module)).
-name(_Module, { Name, _ } = Function, Overridable) ->
- { Count, _, _, _ } = orddict:fetch(Function, Overridable),
+name(_Module, {Name, _} = Function, Overridable) ->
+ {Count, _, _, _} = orddict:fetch(Function, Overridable),
?atom_concat([Name, " (overridable ", Count, ")"]).
%% Store
@@ -33,24 +33,24 @@ name(_Module, { Name, _ } = Function, Overridable) ->
store(Module, Function, GenerateName) ->
Overridable = overridable(Module),
case orddict:fetch(Function, Overridable) of
- { _Count, _Clause, _Neighbours, true } -> ok;
- { Count, Clause, Neighbours, false } ->
- overridable(Module, orddict:store(Function, { Count, Clause, Neighbours, true }, Overridable)),
- { { { Name, Arity }, Kind, Line, File, _Check, Location, Defaults }, Clauses } = Clause,
-
- { FinalKind, FinalName } = case GenerateName of
- true -> { defp, name(Module, Function, Overridable) };
- false -> { Kind, Name }
+ {_Count, _Clause, _Neighbours, true} -> ok;
+ {Count, Clause, Neighbours, false} ->
+ overridable(Module, orddict:store(Function, {Count, Clause, Neighbours, true}, Overridable)),
+ {{{Name, Arity}, Kind, Line, File, _Check, Location, Defaults}, Clauses} = Clause,
+
+ {FinalKind, FinalName} = case GenerateName of
+ true -> {defp, name(Module, Function, Overridable)};
+ false -> {Kind, Name}
end,
case code:is_loaded('Elixir.Module.LocalsTracker') of
- { _, _ } ->
- 'Elixir.Module.LocalsTracker':reattach(Module, Kind, { Name, Arity }, Neighbours);
+ {_, _} ->
+ 'Elixir.Module.LocalsTracker':reattach(Module, Kind, {Name, Arity}, Neighbours);
_ ->
ok
end,
- Def = { function, Line, FinalName, Arity, Clauses },
+ Def = {function, Line, FinalName, Arity, Clauses},
elixir_def:store_each(false, FinalKind, File, Location,
elixir_def:table(Module), elixir_def:clauses_table(Module), Defaults, Def)
end.
@@ -58,18 +58,18 @@ store(Module, Function, GenerateName) ->
%% Store pending declarations that were not manually made concrete.
store_pending(Module) ->
- [store(Module, X, false) || { X, { _, _, _, false } } <- overridable(Module),
+ [store(Module, X, false) || {X, {_, _, _, false}} <- overridable(Module),
not 'Elixir.Module':'defines?'(Module, X)].
%% Error handling
-format_error({ no_super, Module, { Name, Arity } }) ->
- Bins = [format_fa(X) || { X, { _, _, _, _ } } <- overridable(Module)],
+format_error({no_super, Module, {Name, Arity}}) ->
+ Bins = [format_fa(X) || {X, {_, _, _, _}} <- overridable(Module)],
Joined = 'Elixir.Enum':join(Bins, <<", ">>),
io_lib:format("no super defined for ~ts/~B in module ~ts. Overridable functions available are: ~ts",
[Name, Arity, elixir_aliases:inspect(Module), Joined]).
-format_fa({ Name, Arity }) ->
+format_fa({Name, Arity}) ->
A = atom_to_binary(Name, utf8),
B = integer_to_binary(Arity),
<< A/binary, $/, B/binary >>. \ No newline at end of file
diff --git a/lib/elixir/src/elixir_dispatch.erl b/lib/elixir/src/elixir_dispatch.erl
index dbd3c3942..4c615bb7d 100644
--- a/lib/elixir/src/elixir_dispatch.erl
+++ b/lib/elixir/src/elixir_dispatch.erl
@@ -17,21 +17,21 @@
-define(system, 'Elixir.System').
default_functions() ->
- [ { ?kernel, elixir_imported_functions() } ].
+ [ {?kernel, elixir_imported_functions()} ].
default_macros() ->
- [ { ?kernel, elixir_imported_macros() } ].
+ [ {?kernel, elixir_imported_macros()} ].
default_requires() ->
[ 'Elixir.Kernel', 'Elixir.Kernel.Typespec' ].
find_import(Meta, Name, Arity, E) ->
- Tuple = { Name, Arity },
+ Tuple = {Name, Arity},
case find_dispatch(Meta, Tuple, [], E) of
- { function, Receiver } ->
+ {function, Receiver} ->
elixir_lexical:record_import(Receiver, E#elixir_env.lexical_tracker),
elixir_locals:record_import(Tuple, Receiver, E#elixir_env.module, E#elixir_env.function),
Receiver;
- { macro, Receiver } ->
+ {macro, Receiver} ->
elixir_lexical:record_import(Receiver, E#elixir_env.lexical_tracker),
elixir_locals:record_import(Tuple, Receiver, E#elixir_env.module, E#elixir_env.function),
Receiver;
@@ -42,27 +42,27 @@ find_import(Meta, Name, Arity, E) ->
%% Function retrieval
import_function(Meta, Name, Arity, E) ->
- Tuple = { Name, Arity },
+ Tuple = {Name, Arity},
case find_dispatch(Meta, Tuple, [], E) of
- { function, Receiver } ->
+ {function, Receiver} ->
elixir_lexical:record_import(Receiver, E#elixir_env.lexical_tracker),
elixir_locals:record_import(Tuple, Receiver, E#elixir_env.module, E#elixir_env.function),
remote_function(Receiver, Name, Arity, E);
- { macro, _Receiver } ->
+ {macro, _Receiver} ->
false;
- { import, Receiver } ->
+ {import, Receiver} ->
require_function(Meta, Receiver, Name, Arity, E);
false ->
case elixir_import:special_form(Name, Arity) of
true -> false;
false ->
elixir_locals:record_local(Tuple, E#elixir_env.module, E#elixir_env.function),
- { local, Name, Arity }
+ {local, Name, Arity}
end
end.
require_function(_Meta, Receiver, Name, Arity, E) ->
- case is_element({ Name, Arity }, get_optional_macros(Receiver)) of
+ case is_element({Name, Arity}, get_optional_macros(Receiver)) of
true -> false;
false -> remote_function(Receiver, Name, Arity, E)
end.
@@ -70,19 +70,19 @@ require_function(_Meta, Receiver, Name, Arity, E) ->
remote_function(Receiver, Name, Arity, E) ->
elixir_lexical:record_remote(Receiver, E#elixir_env.lexical_tracker),
case inline(Receiver, Name, Arity) of
- { AR, AN } -> { remote, AR, AN, Arity };
- false -> { remote, Receiver, Name, Arity }
+ {AR, AN} -> {remote, AR, AN, Arity};
+ false -> {remote, Receiver, Name, Arity}
end.
%% Dispatches
dispatch_import(Meta, Name, Args, E, Callback) ->
Arity = length(Args),
- case expand_import(Meta, { Name, Arity }, Args, E, []) of
- { ok, Receiver, Quoted } ->
+ case expand_import(Meta, {Name, Arity}, Args, E, []) of
+ {ok, Receiver, Quoted} ->
expand_quoted(Meta, Receiver, Name, Arity, Quoted, E);
- { ok, Receiver } ->
- elixir_exp:expand({ { '.', [], [Receiver, Name] }, Meta, Args }, E);
+ {ok, Receiver} ->
+ elixir_exp:expand({{'.', [], [Receiver, Name]}, Meta, Args}, E);
error ->
Callback()
end.
@@ -91,11 +91,11 @@ dispatch_require(Meta, Receiver, Name, Args, E, Callback) when is_atom(Receiver)
Arity = length(Args),
case rewrite(Receiver, Name, Args, Arity) of
- { ok, AR, AN, AA } ->
+ {ok, AR, AN, AA} ->
Callback(AR, AN, AA);
false ->
- case expand_require(Meta, Receiver, { Name, Arity }, Args, E) of
- { ok, Receiver, Quoted } -> expand_quoted(Meta, Receiver, Name, Arity, Quoted, E);
+ case expand_require(Meta, Receiver, {Name, Arity}, Args, E) of
+ {ok, Receiver, Quoted} -> expand_quoted(Meta, Receiver, Name, Arity, Quoted, E);
error -> Callback(Receiver, Name, Args)
end
end;
@@ -105,7 +105,7 @@ dispatch_require(_Meta, Receiver, Name, Args, _E, Callback) ->
%% Macros expansion
-expand_import(Meta, { Name, Arity } = Tuple, Args, E, Extra) ->
+expand_import(Meta, {Name, Arity} = Tuple, Args, E, Extra) ->
Module = E#elixir_env.module,
Dispatch = find_dispatch(Meta, Tuple, Extra, E),
Function = E#elixir_env.function,
@@ -114,13 +114,13 @@ expand_import(Meta, { Name, Arity } = Tuple, Args, E, Extra) ->
case Dispatch of
%% In case it is an import, we dispatch the import.
- { import, _ } ->
+ {import, _} ->
do_expand_import(Meta, Tuple, Args, Module, E, Dispatch);
%% There is a local and an import. This is a conflict unless
%% the receiver is the same as module (happens on bootstrap).
- { _, Receiver } when Local /= false, Receiver /= Module ->
- Error = { macro_conflict, { Receiver, Name, Arity } },
+ {_, Receiver} when Local /= false, Receiver /= Module ->
+ Error = {macro_conflict, {Receiver, Name, Arity}},
elixir_errors:form_error(Meta, E#elixir_env.file, ?MODULE, Error);
%% There is no local. Dispatch the import.
@@ -130,33 +130,33 @@ expand_import(Meta, { Name, Arity } = Tuple, Args, E, Extra) ->
%% Dispatch to the local.
_ ->
elixir_locals:record_local(Tuple, Module, Function),
- { ok, Module, expand_macro_fun(Meta, Local(), Module, Name, Args, E) }
+ {ok, Module, expand_macro_fun(Meta, Local(), Module, Name, Args, E)}
end.
-do_expand_import(Meta, { Name, Arity } = Tuple, Args, Module, E, Result) ->
+do_expand_import(Meta, {Name, Arity} = Tuple, Args, Module, E, Result) ->
case Result of
- { function, Receiver } ->
+ {function, Receiver} ->
elixir_lexical:record_import(Receiver, E#elixir_env.lexical_tracker),
elixir_locals:record_import(Tuple, Receiver, Module, E#elixir_env.function),
case rewrite(Receiver, Name, Args, Arity) of
- { ok, AR, AN, AA } ->
- { ok, AR, { { '.', [], [AR, AN] }, [], AA } };
+ {ok, AR, AN, AA} ->
+ {ok, AR, {{'.', [], [AR, AN]}, [], AA}};
false ->
- { ok, Receiver }
+ {ok, Receiver}
end;
- { macro, Receiver } ->
+ {macro, Receiver} ->
elixir_lexical:record_import(Receiver, E#elixir_env.lexical_tracker),
elixir_locals:record_import(Tuple, Receiver, Module, E#elixir_env.function),
- { ok, Receiver, expand_macro_named(Meta, Receiver, Name, Arity, Args, E) };
- { import, Receiver } ->
+ {ok, Receiver, expand_macro_named(Meta, Receiver, Name, Arity, Args, E)};
+ {import, Receiver} ->
case expand_require([{require,false}|Meta], Receiver, Tuple, Args, E) of
- { ok, _, _ } = Response -> Response;
- error -> { ok, Receiver }
+ {ok, _, _} = Response -> Response;
+ error -> {ok, Receiver}
end;
false when Module == ?kernel ->
case rewrite(Module, Name, Args, Arity) of
- { ok, AR, AN, AA } ->
- { ok, AR, { { '.', [], [AR, AN] }, [], AA } };
+ {ok, AR, AN, AA} ->
+ {ok, AR, {{'.', [], [AR, AN]}, [], AA}};
false ->
error
end;
@@ -164,7 +164,7 @@ do_expand_import(Meta, { Name, Arity } = Tuple, Args, Module, E, Result) ->
error
end.
-expand_require(Meta, Receiver, { Name, Arity } = Tuple, Args, E) ->
+expand_require(Meta, Receiver, {Name, Arity} = Tuple, Args, E) ->
Module = E#elixir_env.module,
case is_element(Tuple, get_optional_macros(Receiver)) of
@@ -173,9 +173,9 @@ expand_require(Meta, Receiver, { Name, Arity } = Tuple, Args, E) ->
case (Receiver == Module) orelse is_element(Receiver, Requires) orelse skip_require(Meta) of
true ->
elixir_lexical:record_remote(Receiver, E#elixir_env.lexical_tracker),
- { ok, Receiver, expand_macro_named(Meta, Receiver, Name, Arity, Args, E) };
+ {ok, Receiver, expand_macro_named(Meta, Receiver, Name, Arity, Args, E)};
false ->
- Info = { unrequired_module, { Receiver, Name, length(Args), Requires } },
+ Info = {unrequired_module, {Receiver, Name, length(Args), Requires}},
elixir_errors:form_error(Meta, E#elixir_env.file, ?MODULE, Info)
end;
false ->
@@ -186,13 +186,13 @@ expand_require(Meta, Receiver, { Name, Arity } = Tuple, Args, E) ->
expand_macro_fun(Meta, Fun, Receiver, Name, Args, E) ->
Line = ?line(Meta),
- EArg = { Line, E },
+ EArg = {Line, E},
try
apply(Fun, [EArg|Args])
catch
Kind:Reason ->
- Info = [{ Receiver, Name, length(Args), location(Line, E) }, mfa(Line, E)],
+ Info = [{Receiver, Name, length(Args), location(Line, E)}, mfa(Line, E)],
erlang:raise(Kind, Reason, prune_stacktrace(Info, erlang:get_stacktrace(), EArg))
end.
@@ -208,32 +208,32 @@ expand_quoted(Meta, Receiver, Name, Arity, Quoted, E) ->
try
elixir_exp:expand(
- elixir_quote:linify_with_context_counter(Line, { Receiver, Next }, Quoted),
+ elixir_quote:linify_with_context_counter(Line, {Receiver, Next}, Quoted),
E)
catch
Kind:Reason ->
- Info = [{ Receiver, Name, Arity, location(Line, E) }, mfa(Line, E)],
+ Info = [{Receiver, Name, Arity, location(Line, E)}, mfa(Line, E)],
erlang:raise(Kind, Reason, prune_stacktrace(Info, erlang:get_stacktrace(), nil))
end.
mfa(Line, #elixir_env{module=nil} = E) ->
- { elixir_compiler, '__FILE__', 2, location(Line, E) };
+ {elixir_compiler, '__FILE__', 2, location(Line, E)};
mfa(Line, #elixir_env{module=Module, function=nil} = E) ->
- { Module, '__MODULE__', 0, location(Line, E) };
-mfa(Line, #elixir_env{module=Module, function={ Name, Arity }} = E) ->
- { Module, Name, Arity, location(Line, E) }.
+ {Module, '__MODULE__', 0, location(Line, E)};
+mfa(Line, #elixir_env{module=Module, function={Name, Arity}} = E) ->
+ {Module, Name, Arity, location(Line, E)}.
location(Line, E) ->
- [{ file, elixir_utils:characters_to_list(E#elixir_env.file) }, { line, Line }].
+ [{file, elixir_utils:characters_to_list(E#elixir_env.file)}, {line, Line}].
%% Helpers
skip_require(Meta) ->
- lists:keyfind(require, 1, Meta) == { require, false }.
+ lists:keyfind(require, 1, Meta) == {require, false}.
find_dispatch(Meta, Tuple, Extra, E) ->
case is_import(Meta) of
- { import, _ } = Import ->
+ {import, _} = Import ->
Import;
false ->
Funs = E#elixir_env.functions,
@@ -241,26 +241,26 @@ find_dispatch(Meta, Tuple, Extra, E) ->
FunMatch = find_dispatch(Tuple, Funs),
MacMatch = find_dispatch(Tuple, Macs),
- case { FunMatch, MacMatch } of
- { [], [Receiver] } -> { macro, Receiver };
- { [Receiver], [] } -> { function, Receiver };
- { [], [] } -> false;
+ case {FunMatch, MacMatch} of
+ {[], [Receiver]} -> {macro, Receiver};
+ {[Receiver], []} -> {function, Receiver};
+ {[], []} -> false;
_ ->
- { Name, Arity } = Tuple,
+ {Name, Arity} = Tuple,
[First, Second|_] = FunMatch ++ MacMatch,
- Error = { ambiguous_call, { First, Second, Name, Arity } },
+ Error = {ambiguous_call, {First, Second, Name, Arity}},
elixir_errors:form_error(Meta, E#elixir_env.file, ?MODULE, Error)
end
end.
find_dispatch(Tuple, List) ->
- [Receiver || { Receiver, Set } <- List, is_element(Tuple, Set)].
+ [Receiver || {Receiver, Set} <- List, is_element(Tuple, Set)].
is_import(Meta) ->
case lists:keyfind(import, 1, Meta) of
- { import, _ } = Import ->
+ {import, _} = Import ->
case lists:keyfind(context, 1, Meta) of
- { context, _ } -> Import;
+ {context, _} -> Import;
false ->
false
end;
@@ -269,10 +269,10 @@ is_import(Meta) ->
end.
%% We've reached the invoked macro, skip it with the rest
-prune_stacktrace(Info, [{ _, _, [E|_], _ }|_], E) ->
+prune_stacktrace(Info, [{_, _, [E|_], _}|_], E) ->
Info;
%% We've reached the elixir_dispatch internals, skip it with the rest
-prune_stacktrace(Info, [{ Mod, _, _, _ }|_], _) when Mod == elixir_dispatch; Mod == elixir_exp ->
+prune_stacktrace(Info, [{Mod, _, _, _}|_], _) when Mod == elixir_dispatch; Mod == elixir_exp ->
Info;
prune_stacktrace(Info, [H|T], E) ->
[H|prune_stacktrace(Info, T, E)];
@@ -281,15 +281,15 @@ prune_stacktrace(Info, [], _) ->
%% ERROR HANDLING
-format_error({ unrequired_module, { Receiver, Name, Arity, _Required }}) ->
+format_error({unrequired_module, {Receiver, Name, Arity, _Required}}) ->
Module = elixir_aliases:inspect(Receiver),
io_lib:format("you must require ~ts before invoking the macro ~ts.~ts/~B",
[Module, Module, Name, Arity]);
-format_error({ macro_conflict, { Receiver, Name, Arity } }) ->
+format_error({macro_conflict, {Receiver, Name, Arity}}) ->
io_lib:format("call to local macro ~ts/~B conflicts with imported ~ts.~ts/~B, "
"please rename the local macro or remove the conflicting import",
[Name, Arity, elixir_aliases:inspect(Receiver), Name, Arity]);
-format_error({ ambiguous_call, { Mod1, Mod2, Name, Arity }}) ->
+format_error({ambiguous_call, {Mod1, Mod2, Name, Arity}}) ->
io_lib:format("function ~ts/~B imported from both ~ts and ~ts, call is ambiguous",
[Name, Arity, elixir_aliases:inspect(Mod1), elixir_aliases:inspect(Mod2)]).
@@ -300,13 +300,13 @@ get_optional_macros(erlang) -> [];
get_optional_macros(Receiver) ->
case code:ensure_loaded(Receiver) of
- { module, Receiver } ->
+ {module, Receiver} ->
try
Receiver:'__info__'(macros)
catch
error:undef -> []
end;
- { error, _ } -> []
+ {error, _} -> []
end.
elixir_imported_functions() ->
@@ -324,136 +324,136 @@ elixir_imported_macros() ->
end.
rewrite(?kernel, atom_to_binary, [Arg], 1) ->
- { ok, erlang, atom_to_binary, [Arg, utf8] };
+ {ok, erlang, atom_to_binary, [Arg, utf8]};
rewrite(?kernel, binary_to_atom, [Arg], 1) ->
- { ok, erlang, binary_to_atom, [Arg, utf8] };
+ {ok, erlang, binary_to_atom, [Arg, utf8]};
rewrite(?kernel, binary_to_existing_atom, [Arg], 1) ->
- { ok, erlang, binary_to_existing_atom, [Arg, utf8] };
+ {ok, erlang, binary_to_existing_atom, [Arg, utf8]};
rewrite(?kernel, elem, [Tuple, Index], 2) ->
- { ok, erlang, element, [increment(Index), Tuple] };
+ {ok, erlang, element, [increment(Index), Tuple]};
rewrite(?kernel, set_elem, [Tuple, Index, Value], 2) ->
- { ok, erlang, setelement, [increment(Index), Tuple, Value] };
+ {ok, erlang, setelement, [increment(Index), Tuple, Value]};
rewrite(?process, monitor, [Arg], 1) ->
- { ok, erlang, monitor, [process, Arg] };
+ {ok, erlang, monitor, [process, Arg]};
rewrite(Receiver, Name, Args, Arity) ->
case inline(Receiver, Name, Arity) of
- { AR, AN } -> { ok, AR, AN, Args };
+ {AR, AN} -> {ok, AR, AN, Args};
false -> false
end.
increment(Number) when is_number(Number) ->
Number + 1;
increment(Other) ->
- { { '.', [], [erlang, '+'] }, [], [Other, 1] }.
-
-inline(?kernel, '+', 2) -> { erlang, '+' };
-inline(?kernel, '-', 2) -> { erlang, '-' };
-inline(?kernel, '+', 1) -> { erlang, '+' };
-inline(?kernel, '-', 1) -> { erlang, '-' };
-inline(?kernel, '*', 2) -> { erlang, '*' };
-inline(?kernel, '/', 2) -> { erlang, '/' };
-inline(?kernel, '++', 2) -> { erlang, '++' };
-inline(?kernel, '--', 2) -> { erlang, '--' };
-inline(?kernel, 'xor', 2) -> { erlang, 'xor' };
-inline(?kernel, 'not', 1) -> { erlang, 'not' };
-inline(?kernel, '<', 2) -> { erlang, '<' };
-inline(?kernel, '>', 2) -> { erlang, '>' };
-inline(?kernel, '<=', 2) -> { erlang, '=<' };
-inline(?kernel, '>=', 2) -> { erlang, '>=' };
-inline(?kernel, '==', 2) -> { erlang, '==' };
-inline(?kernel, '!=', 2) -> { erlang, '/=' };
-inline(?kernel, '===', 2) -> { erlang, '=:=' };
-inline(?kernel, '!==', 2) -> { erlang, '=/=' };
-inline(?kernel, abs, 1) -> { erlang, abs };
-inline(?kernel, apply, 2) -> { erlang, apply };
-inline(?kernel, apply, 3) -> { erlang, apply };
-inline(?kernel, atom_to_list, 1) -> { erlang, atom_to_list };
-inline(?kernel, binary_part, 3) -> { erlang, binary_part };
-inline(?kernel, binary_to_float, 1) -> { erlang, binary_to_float };
-inline(?kernel, binary_to_float, 2) -> { erlang, binary_to_float };
-inline(?kernel, binary_to_integer, 1) -> { erlang, binary_to_integer };
-inline(?kernel, binary_to_integer, 2) -> { erlang, binary_to_integer };
-inline(?kernel, bit_size, 1) -> { erlang, bit_size };
-inline(?kernel, bitstring_to_list, 1) -> { erlang, bitstring_to_list };
-inline(?kernel, byte_size, 1) -> { erlang, byte_size };
-inline(?kernel, 'div', 2) -> { erlang, 'div' };
-inline(?kernel, exit, 1) -> { erlang, exit };
-inline(?kernel, float_to_binary, 1) -> { erlang, float_to_binary };
-inline(?kernel, float_to_list, 1) -> { erlang, float_to_list };
-inline(?kernel, hd, 1) -> { erlang, hd };
-inline(?kernel, integer_to_binary, 1) -> { erlang, integer_to_binary };
-inline(?kernel, integer_to_binary, 2) -> { erlang, integer_to_binary };
-inline(?kernel, integer_to_list, 1) -> { erlang, integer_to_list };
-inline(?kernel, integer_to_list, 2) -> { erlang, integer_to_list };
-inline(?kernel, iodata_size, 1) -> { erlang, iolist_size };
-inline(?kernel, iodata_to_binary, 1) -> { erlang, iolist_to_binary };
-inline(?kernel, is_atom, 1) -> { erlang, is_atom };
-inline(?kernel, is_binary, 1) -> { erlang, is_binary };
-inline(?kernel, is_bitstring, 1) -> { erlang, is_bitstring };
-inline(?kernel, is_boolean, 1) -> { erlang, is_boolean };
-inline(?kernel, is_float, 1) -> { erlang, is_float };
-inline(?kernel, is_function, 1) -> { erlang, is_function };
-inline(?kernel, is_function, 2) -> { erlang, is_function };
-inline(?kernel, is_integer, 1) -> { erlang, is_integer };
-inline(?kernel, is_list, 1) -> { erlang, is_list };
-inline(?kernel, is_map, 1) -> { erlang, is_map };
-inline(?kernel, is_number, 1) -> { erlang, is_number };
-inline(?kernel, is_pid, 1) -> { erlang, is_pid };
-inline(?kernel, is_port, 1) -> { erlang, is_port };
-inline(?kernel, is_reference, 1) -> { erlang, is_reference };
-inline(?kernel, is_tuple, 1) -> { erlang, is_tuple };
-inline(?kernel, length, 1) -> { erlang, length };
-inline(?kernel, list_to_atom, 1) -> { erlang, list_to_atom };
-inline(?kernel, list_to_bitstring, 1) -> { erlang, list_to_bitstring };
-inline(?kernel, list_to_existing_atom, 1) -> { erlang, list_to_existing_atom };
-inline(?kernel, list_to_float, 1) -> { erlang, list_to_float };
-inline(?kernel, list_to_integer, 1) -> { erlang, list_to_integer };
-inline(?kernel, list_to_integer, 2) -> { erlang, list_to_integer };
-inline(?kernel, list_to_tuple, 1) -> { erlang, list_to_tuple };
-inline(?kernel, make_ref, 0) -> { erlang, make_ref };
-inline(?kernel, map_size, 1) -> { erlang, map_size };
-inline(?kernel, max, 2) -> { erlang, max };
-inline(?kernel, min, 2) -> { erlang, min };
-inline(?kernel, node, 0) -> { erlang, node };
-inline(?kernel, node, 1) -> { erlang, node };
-inline(?kernel, 'rem', 2) -> { erlang, 'rem' };
-inline(?kernel, round, 1) -> { erlang, round };
-inline(?kernel, self, 0) -> { erlang, self };
-inline(?kernel, send, 2) -> { erlang, send };
-inline(?kernel, size, 1) -> { erlang, size };
-inline(?kernel, spawn, 1) -> { erlang, spawn };
-inline(?kernel, spawn, 3) -> { erlang, spawn };
-inline(?kernel, spawn_link, 1) -> { erlang, spawn_link };
-inline(?kernel, spawn_link, 3) -> { erlang, spawn_link };
-inline(?kernel, throw, 1) -> { erlang, throw };
-inline(?kernel, tl, 1) -> { erlang, tl };
-inline(?kernel, trunc, 1) -> { erlang, trunc };
-inline(?kernel, tuple_size, 1) -> { erlang, tuple_size };
-inline(?kernel, tuple_to_list, 1) -> { erlang, tuple_to_list };
-
-inline(?process, exit, 2) -> { erlang, exit };
-inline(?process, spawn, 1) -> { erlang, spawn };
-inline(?process, spawn, 2) -> { erlang, spawn_opt };
-inline(?process, spawn, 3) -> { erlang, spawn };
-inline(?process, spawn, 4) -> { erlang, spawn_opt };
-inline(?process, spawn_link, 1) -> { erlang, spawn_link };
-inline(?process, spawn_link, 3) -> { erlang, spawn_link };
-inline(?process, spawn_monitor, 1) -> { erlang, spawn_monitor };
-inline(?process, spawn_monitor, 3) -> { erlang, spawn_monitor };
-inline(?process, demonitor, 1) -> { erlang, demonitor };
-inline(?process, demonitor, 2) -> { erlang, demonitor };
-inline(?process, link, 1) -> { erlang, link };
-inline(?process, unlink, 1) -> { erlang, unlink };
-
-inline(?node, spawn, 2) -> { erlang, spawn };
-inline(?node, spawn, 3) -> { erlang, spawn_opt };
-inline(?node, spawn, 4) -> { erlang, spawn };
-inline(?node, spawn, 5) -> { erlang, spawn_opt };
-inline(?node, spawn_link, 2) -> { erlang, spawn_link };
-inline(?node, spawn_link, 4) -> { erlang, spawn_link };
-inline(?node, spawn_monitor, 2) -> { erlang, spawn_monitor };
-inline(?node, spawn_monitor, 4) -> { erlang, spawn_monitor };
-
-inline(?system, stacktrace, 0) -> { erlang, get_stacktrace };
+ {{'.', [], [erlang, '+']}, [], [Other, 1]}.
+
+inline(?kernel, '+', 2) -> {erlang, '+'};
+inline(?kernel, '-', 2) -> {erlang, '-'};
+inline(?kernel, '+', 1) -> {erlang, '+'};
+inline(?kernel, '-', 1) -> {erlang, '-'};
+inline(?kernel, '*', 2) -> {erlang, '*'};
+inline(?kernel, '/', 2) -> {erlang, '/'};
+inline(?kernel, '++', 2) -> {erlang, '++'};
+inline(?kernel, '--', 2) -> {erlang, '--'};
+inline(?kernel, 'xor', 2) -> {erlang, 'xor'};
+inline(?kernel, 'not', 1) -> {erlang, 'not'};
+inline(?kernel, '<', 2) -> {erlang, '<'};
+inline(?kernel, '>', 2) -> {erlang, '>'};
+inline(?kernel, '<=', 2) -> {erlang, '=<'};
+inline(?kernel, '>=', 2) -> {erlang, '>='};
+inline(?kernel, '==', 2) -> {erlang, '=='};
+inline(?kernel, '!=', 2) -> {erlang, '/='};
+inline(?kernel, '===', 2) -> {erlang, '=:='};
+inline(?kernel, '!==', 2) -> {erlang, '=/='};
+inline(?kernel, abs, 1) -> {erlang, abs};
+inline(?kernel, apply, 2) -> {erlang, apply};
+inline(?kernel, apply, 3) -> {erlang, apply};
+inline(?kernel, atom_to_list, 1) -> {erlang, atom_to_list};
+inline(?kernel, binary_part, 3) -> {erlang, binary_part};
+inline(?kernel, binary_to_float, 1) -> {erlang, binary_to_float};
+inline(?kernel, binary_to_float, 2) -> {erlang, binary_to_float};
+inline(?kernel, binary_to_integer, 1) -> {erlang, binary_to_integer};
+inline(?kernel, binary_to_integer, 2) -> {erlang, binary_to_integer};
+inline(?kernel, bit_size, 1) -> {erlang, bit_size};
+inline(?kernel, bitstring_to_list, 1) -> {erlang, bitstring_to_list};
+inline(?kernel, byte_size, 1) -> {erlang, byte_size};
+inline(?kernel, 'div', 2) -> {erlang, 'div'};
+inline(?kernel, exit, 1) -> {erlang, exit};
+inline(?kernel, float_to_binary, 1) -> {erlang, float_to_binary};
+inline(?kernel, float_to_list, 1) -> {erlang, float_to_list};
+inline(?kernel, hd, 1) -> {erlang, hd};
+inline(?kernel, integer_to_binary, 1) -> {erlang, integer_to_binary};
+inline(?kernel, integer_to_binary, 2) -> {erlang, integer_to_binary};
+inline(?kernel, integer_to_list, 1) -> {erlang, integer_to_list};
+inline(?kernel, integer_to_list, 2) -> {erlang, integer_to_list};
+inline(?kernel, iodata_size, 1) -> {erlang, iolist_size};
+inline(?kernel, iodata_to_binary, 1) -> {erlang, iolist_to_binary};
+inline(?kernel, is_atom, 1) -> {erlang, is_atom};
+inline(?kernel, is_binary, 1) -> {erlang, is_binary};
+inline(?kernel, is_bitstring, 1) -> {erlang, is_bitstring};
+inline(?kernel, is_boolean, 1) -> {erlang, is_boolean};
+inline(?kernel, is_float, 1) -> {erlang, is_float};
+inline(?kernel, is_function, 1) -> {erlang, is_function};
+inline(?kernel, is_function, 2) -> {erlang, is_function};
+inline(?kernel, is_integer, 1) -> {erlang, is_integer};
+inline(?kernel, is_list, 1) -> {erlang, is_list};
+inline(?kernel, is_map, 1) -> {erlang, is_map};
+inline(?kernel, is_number, 1) -> {erlang, is_number};
+inline(?kernel, is_pid, 1) -> {erlang, is_pid};
+inline(?kernel, is_port, 1) -> {erlang, is_port};
+inline(?kernel, is_reference, 1) -> {erlang, is_reference};
+inline(?kernel, is_tuple, 1) -> {erlang, is_tuple};
+inline(?kernel, length, 1) -> {erlang, length};
+inline(?kernel, list_to_atom, 1) -> {erlang, list_to_atom};
+inline(?kernel, list_to_bitstring, 1) -> {erlang, list_to_bitstring};
+inline(?kernel, list_to_existing_atom, 1) -> {erlang, list_to_existing_atom};
+inline(?kernel, list_to_float, 1) -> {erlang, list_to_float};
+inline(?kernel, list_to_integer, 1) -> {erlang, list_to_integer};
+inline(?kernel, list_to_integer, 2) -> {erlang, list_to_integer};
+inline(?kernel, list_to_tuple, 1) -> {erlang, list_to_tuple};
+inline(?kernel, make_ref, 0) -> {erlang, make_ref};
+inline(?kernel, map_size, 1) -> {erlang, map_size};
+inline(?kernel, max, 2) -> {erlang, max};
+inline(?kernel, min, 2) -> {erlang, min};
+inline(?kernel, node, 0) -> {erlang, node};
+inline(?kernel, node, 1) -> {erlang, node};
+inline(?kernel, 'rem', 2) -> {erlang, 'rem'};
+inline(?kernel, round, 1) -> {erlang, round};
+inline(?kernel, self, 0) -> {erlang, self};
+inline(?kernel, send, 2) -> {erlang, send};
+inline(?kernel, size, 1) -> {erlang, size};
+inline(?kernel, spawn, 1) -> {erlang, spawn};
+inline(?kernel, spawn, 3) -> {erlang, spawn};
+inline(?kernel, spawn_link, 1) -> {erlang, spawn_link};
+inline(?kernel, spawn_link, 3) -> {erlang, spawn_link};
+inline(?kernel, throw, 1) -> {erlang, throw};
+inline(?kernel, tl, 1) -> {erlang, tl};
+inline(?kernel, trunc, 1) -> {erlang, trunc};
+inline(?kernel, tuple_size, 1) -> {erlang, tuple_size};
+inline(?kernel, tuple_to_list, 1) -> {erlang, tuple_to_list};
+
+inline(?process, exit, 2) -> {erlang, exit};
+inline(?process, spawn, 1) -> {erlang, spawn};
+inline(?process, spawn, 2) -> {erlang, spawn_opt};
+inline(?process, spawn, 3) -> {erlang, spawn};
+inline(?process, spawn, 4) -> {erlang, spawn_opt};
+inline(?process, spawn_link, 1) -> {erlang, spawn_link};
+inline(?process, spawn_link, 3) -> {erlang, spawn_link};
+inline(?process, spawn_monitor, 1) -> {erlang, spawn_monitor};
+inline(?process, spawn_monitor, 3) -> {erlang, spawn_monitor};
+inline(?process, demonitor, 1) -> {erlang, demonitor};
+inline(?process, demonitor, 2) -> {erlang, demonitor};
+inline(?process, link, 1) -> {erlang, link};
+inline(?process, unlink, 1) -> {erlang, unlink};
+
+inline(?node, spawn, 2) -> {erlang, spawn};
+inline(?node, spawn, 3) -> {erlang, spawn_opt};
+inline(?node, spawn, 4) -> {erlang, spawn};
+inline(?node, spawn, 5) -> {erlang, spawn_opt};
+inline(?node, spawn_link, 2) -> {erlang, spawn_link};
+inline(?node, spawn_link, 4) -> {erlang, spawn_link};
+inline(?node, spawn_monitor, 2) -> {erlang, spawn_monitor};
+inline(?node, spawn_monitor, 4) -> {erlang, spawn_monitor};
+
+inline(?system, stacktrace, 0) -> {erlang, get_stacktrace};
inline(_, _, _) -> false.
diff --git a/lib/elixir/src/elixir_env.erl b/lib/elixir/src/elixir_env.erl
index e425473d3..3fae8b3a1 100644
--- a/lib/elixir/src/elixir_env.erl
+++ b/lib/elixir/src/elixir_env.erl
@@ -5,7 +5,7 @@
%% Conversion in between #elixir_env, #elixir_scope and Macro.Env
-env_to_ex({ Line, #elixir_env{} = Env }) ->
+env_to_ex({Line, #elixir_env{} = Env}) ->
erlang:setelement(1, Env#elixir_env{line=Line}, 'Elixir.Macro.Env').
ex_to_env(Env) when element(1, Env) == 'Elixir.Macro.Env' ->
@@ -18,7 +18,7 @@ env_to_scope_with_vars(#elixir_env{} = Env, Vars) ->
(env_to_scope(Env))#elixir_scope{
vars=orddict:from_list(Vars),
counter=[{'_',length(Vars)}]
- }.
+ }.
%% SCOPE MERGING
@@ -28,12 +28,12 @@ mergev(E1, E2) when is_list(E1) ->
E2#elixir_env{
vars=merge_vars(E1, E2#elixir_env.vars),
export_vars=merge_opt_vars(E1, E2#elixir_env.export_vars)
- };
+ };
mergev(E1, E2) ->
E2#elixir_env{
vars=merge_vars(E1#elixir_env.vars, E2#elixir_env.vars),
export_vars=merge_opt_vars(E1#elixir_env.export_vars, E2#elixir_env.export_vars)
- }.
+ }.
%% Receives two scopes and return the later scope
%% keeping the variables from the first (imports
diff --git a/lib/elixir/src/elixir_errors.erl b/lib/elixir/src/elixir_errors.erl
index 8221ee323..33d398870 100644
--- a/lib/elixir/src/elixir_errors.erl
+++ b/lib/elixir/src/elixir_errors.erl
@@ -13,7 +13,7 @@ warn(Warning) ->
CompilerPid = get(elixir_compiler_pid),
if
CompilerPid =/= undefined ->
- elixir_code_server:cast({ register_warning, CompilerPid });
+ elixir_code_server:cast({register_warning, CompilerPid});
true -> false
end,
io:put_chars(standard_error, Warning).
@@ -68,26 +68,26 @@ deprecation(Meta, File, Message, Args) ->
%% Handle warnings and errors (called during module compilation)
%% Ignore on bootstrap
-handle_file_warning(true, _File, { _Line, sys_core_fold, nomatch_guard }) -> [];
-handle_file_warning(true, _File, { _Line, sys_core_fold, { nomatch_shadow, _ } }) -> [];
+handle_file_warning(true, _File, {_Line, sys_core_fold, nomatch_guard}) -> [];
+handle_file_warning(true, _File, {_Line, sys_core_fold, {nomatch_shadow, _}}) -> [];
%% Ignore always
-handle_file_warning(_, _File, { _Line, sys_core_fold, useless_building }) -> [];
+handle_file_warning(_, _File, {_Line, sys_core_fold, useless_building}) -> [];
-%% This is an Erlang bug, it considers { tuple, _ }.call to always fail
-handle_file_warning(_, _File, { _Line, v3_kernel, bad_call }) -> [];
+%% This is an Erlang bug, it considers {tuple, _}.call to always fail
+handle_file_warning(_, _File, {_Line, v3_kernel, bad_call}) -> [];
%% We handle unused local warnings ourselves
-handle_file_warning(_, _File, { _Line, erl_lint, { unused_function, _ } }) -> [];
+handle_file_warning(_, _File, {_Line, erl_lint, {unused_function, _}}) -> [];
%% Make no_effect clauses pretty
-handle_file_warning(_, File, { Line, sys_core_fold, { no_effect, { erlang, F, A } } }) ->
- { Fmt, Args } = case erl_internal:comp_op(F, A) of
- true -> { "use of operator ~ts has no effect", [translate_comp_op(F)] };
+handle_file_warning(_, File, {Line, sys_core_fold, {no_effect, {erlang, F, A}}}) ->
+ {Fmt, Args} = case erl_internal:comp_op(F, A) of
+ true -> {"use of operator ~ts has no effect", [translate_comp_op(F)]};
false ->
case erl_internal:bif(F, A) of
- false -> { "the call to :erlang.~ts/~B has no effect", [F,A] };
- true -> { "the call to ~ts/~B has no effect", [F,A] }
+ false -> {"the call to :erlang.~ts/~B has no effect", [F,A]};
+ true -> {"the call to ~ts/~B has no effect", [F,A]}
end
end,
Message = io_lib:format(Fmt, Args),
@@ -95,10 +95,10 @@ handle_file_warning(_, File, { Line, sys_core_fold, { no_effect, { erlang, F, A
%% Rewrite undefined behaviour to check for protocols
handle_file_warning(_, File, {Line,erl_lint,{undefined_behaviour_func,{Fun,Arity},Module}}) ->
- { DefKind, Def, DefArity } =
+ {DefKind, Def, DefArity} =
case atom_to_list(Fun) of
- "MACRO-" ++ Rest -> { macro, list_to_atom(Rest), Arity - 1 };
- _ -> { function, Fun, Arity }
+ "MACRO-" ++ Rest -> {macro, list_to_atom(Rest), Arity - 1};
+ _ -> {function, Fun, Arity}
end,
Kind = protocol_or_behaviour(Module),
@@ -124,7 +124,7 @@ handle_file_warning(_, _File, {_Line,erl_lint,{shadowed_var,_Var,_Where}}) ->
%% Properly format other unused vars
handle_file_warning(_, File, {Line,erl_lint,{unused_var,Var}}) ->
- Message = format_error(erl_lint, { unused_var, format_var(Var) }),
+ Message = format_error(erl_lint, {unused_var, format_var(Var)}),
warn(Line, File, Message);
%% Default behaviour
@@ -195,10 +195,10 @@ protocol_or_behaviour(Module) ->
is_protocol(Module) ->
case code:ensure_loaded(Module) of
- { module, _ } ->
+ {module, _} ->
erlang:function_exported(Module, '__protocol__', 1) andalso
Module:'__protocol__'(name) == Module;
- { error, _ } ->
+ {error, _} ->
false
end.
diff --git a/lib/elixir/src/elixir_exp.erl b/lib/elixir/src/elixir_exp.erl
index 8c08fd120..dd3d15600 100644
--- a/lib/elixir/src/elixir_exp.erl
+++ b/lib/elixir/src/elixir_exp.erl
@@ -5,123 +5,123 @@
%% =
-expand({ '=', Meta, [Left, Right] }, E) ->
+expand({'=', Meta, [Left, Right]}, E) ->
assert_no_guard_scope(Meta, '=', E),
- { ERight, ER } = expand(Right, E),
- { ELeft, EL } = elixir_exp_clauses:match(fun expand/2, Left, E),
- { { '=', Meta, [ELeft, ERight] }, elixir_env:mergev(EL, ER) };
+ {ERight, ER} = expand(Right, E),
+ {ELeft, EL} = elixir_exp_clauses:match(fun expand/2, Left, E),
+ {{'=', Meta, [ELeft, ERight]}, elixir_env:mergev(EL, ER)};
%% Literal operators
-expand({ '{}', Meta, Args }, E) ->
- { EArgs, EA } = expand_args(Args, E),
- { { '{}', Meta, EArgs }, EA };
+expand({'{}', Meta, Args}, E) ->
+ {EArgs, EA} = expand_args(Args, E),
+ {{'{}', Meta, EArgs}, EA};
-expand({ '%{}', Meta, Args }, E) ->
+expand({'%{}', Meta, Args}, E) ->
elixir_map:expand_map(Meta, Args, E);
-expand({ '%', Meta, [Left, Right] }, E) ->
+expand({'%', Meta, [Left, Right]}, E) ->
elixir_map:expand_struct(Meta, Left, Right, E);
-expand({ '<<>>', Meta, Args }, E) ->
+expand({'<<>>', Meta, Args}, E) ->
elixir_bitstring:expand(Meta, Args, E);
%% Other operators
-expand({ '__op__', Meta, [_, _] = Args }, E) ->
- { EArgs, EA } = expand_args(Args, E),
- { { '__op__', Meta, EArgs }, EA };
+expand({'__op__', Meta, [_, _] = Args}, E) ->
+ {EArgs, EA} = expand_args(Args, E),
+ {{'__op__', Meta, EArgs}, EA};
-expand({ '__op__', Meta, [_, _, _] = Args }, E) ->
- { EArgs, EA } = expand_args(Args, E),
- { { '__op__', Meta, EArgs }, EA };
+expand({'__op__', Meta, [_, _, _] = Args}, E) ->
+ {EArgs, EA} = expand_args(Args, E),
+ {{'__op__', Meta, EArgs}, EA};
-expand({ '->', Meta, _Args }, E) ->
+expand({'->', Meta, _Args}, E) ->
compile_error(Meta, E#elixir_env.file, "unhandled operator ->");
%% __block__
-expand({ '__block__', _Meta, [] }, E) ->
- { nil, E };
-expand({ '__block__', _Meta, [Arg] }, E) ->
+expand({'__block__', _Meta, []}, E) ->
+ {nil, E};
+expand({'__block__', _Meta, [Arg]}, E) ->
expand(Arg, E);
-expand({ '__block__', Meta, Args }, E) when is_list(Args) ->
- { EArgs, EA } = expand_many(Args, E),
- { { '__block__', Meta, EArgs }, EA };
+expand({'__block__', Meta, Args}, E) when is_list(Args) ->
+ {EArgs, EA} = expand_many(Args, E),
+ {{'__block__', Meta, EArgs}, EA};
%% __aliases__
-expand({ '__aliases__', _, _ } = Alias, E) ->
+expand({'__aliases__', _, _} = Alias, E) ->
case elixir_aliases:expand(Alias, E#elixir_env.aliases,
E#elixir_env.macro_aliases, E#elixir_env.lexical_tracker) of
Receiver when is_atom(Receiver) ->
elixir_lexical:record_remote(Receiver, E#elixir_env.lexical_tracker),
- { Receiver, E };
+ {Receiver, E};
Aliases ->
- { EAliases, EA } = expand_args(Aliases, E),
+ {EAliases, EA} = expand_args(Aliases, E),
case lists:all(fun is_atom/1, EAliases) of
true ->
Receiver = elixir_aliases:concat(EAliases),
elixir_lexical:record_remote(Receiver, E#elixir_env.lexical_tracker),
- { Receiver, EA };
+ {Receiver, EA};
false ->
- { { { '.', [], [elixir_aliases, concat] }, [], [EAliases] }, EA }
+ {{{'.', [], [elixir_aliases, concat]}, [], [EAliases]}, EA}
end
end;
%% alias
-expand({ alias, Meta, [Ref] }, E) ->
- expand({ alias, Meta, [Ref,[]] }, E);
-expand({ alias, Meta, [Ref, KV] }, E) ->
+expand({alias, Meta, [Ref]}, E) ->
+ expand({alias, Meta, [Ref,[]]}, E);
+expand({alias, Meta, [Ref, KV]}, E) ->
assert_no_match_or_guard_scope(Meta, alias, E),
- { ERef, ER } = expand(Ref, E),
- { EKV, ET } = expand_opts(Meta, alias, [as, warn], no_alias_opts(KV), ER),
+ {ERef, ER} = expand(Ref, E),
+ {EKV, ET} = expand_opts(Meta, alias, [as, warn], no_alias_opts(KV), ER),
if
is_atom(ERef) ->
- { { alias, Meta, [ERef, EKV] },
- expand_alias(Meta, true, ERef, EKV, ET) };
+ {{alias, Meta, [ERef, EKV]},
+ expand_alias(Meta, true, ERef, EKV, ET)};
true ->
compile_error(Meta, E#elixir_env.file,
"invalid argument for alias, expected a compile time atom or alias, got: ~ts",
['Elixir.Kernel':inspect(ERef)])
end;
-expand({ require, Meta, [Ref] }, E) ->
- expand({ require, Meta, [Ref, []] }, E);
-expand({ require, Meta, [Ref, KV] }, E) ->
+expand({require, Meta, [Ref]}, E) ->
+ expand({require, Meta, [Ref, []]}, E);
+expand({require, Meta, [Ref, KV]}, E) ->
assert_no_match_or_guard_scope(Meta, require, E),
- { ERef, ER } = expand(Ref, E),
- { EKV, ET } = expand_opts(Meta, require, [as, warn], no_alias_opts(KV), ER),
+ {ERef, ER} = expand(Ref, E),
+ {EKV, ET} = expand_opts(Meta, require, [as, warn], no_alias_opts(KV), ER),
if
is_atom(ERef) ->
elixir_aliases:ensure_loaded(Meta, ERef, ET),
- { { require, Meta, [ERef, EKV] },
- expand_require(Meta, ERef, EKV, ET) };
+ {{require, Meta, [ERef, EKV]},
+ expand_require(Meta, ERef, EKV, ET)};
true ->
compile_error(Meta, E#elixir_env.file,
"invalid argument for require, expected a compile time atom or alias, got: ~ts",
['Elixir.Kernel':inspect(ERef)])
end;
-expand({ import, Meta, [Left] }, E) ->
- expand({ import, Meta, [Left, []]}, E);
+expand({import, Meta, [Left]}, E) ->
+ expand({import, Meta, [Left, []]}, E);
-expand({ import, Meta, [Ref, KV] }, E) ->
+expand({import, Meta, [Ref, KV]}, E) ->
assert_no_match_or_guard_scope(Meta, import, E),
- { ERef, ER } = expand(Ref, E),
- { EKV, ET } = expand_opts(Meta, import, [only, except, warn], KV, ER),
+ {ERef, ER} = expand(Ref, E),
+ {EKV, ET} = expand_opts(Meta, import, [only, except, warn], KV, ER),
if
is_atom(ERef) ->
elixir_aliases:ensure_loaded(Meta, ERef, ET),
- { Functions, Macros } = elixir_import:import(Meta, ERef, EKV, ET),
- { { import, Meta, [ERef, EKV] },
- expand_require(Meta, ERef, EKV, ET#elixir_env{functions=Functions, macros=Macros}) };
+ {Functions, Macros} = elixir_import:import(Meta, ERef, EKV, ET),
+ {{import, Meta, [ERef, EKV]},
+ expand_require(Meta, ERef, EKV, ET#elixir_env{functions=Functions, macros=Macros})};
true ->
compile_error(Meta, E#elixir_env.file,
"invalid argument for import, expected a compile time atom or alias, got: ~ts",
@@ -130,59 +130,59 @@ expand({ import, Meta, [Ref, KV] }, E) ->
%% Pseudo vars
-expand({ '__MODULE__', _, Atom }, E) when is_atom(Atom) ->
- { E#elixir_env.module, E };
-expand({ '__DIR__', _, Atom }, E) when is_atom(Atom) ->
- { filename:dirname(E#elixir_env.file), E };
-expand({ '__CALLER__', _, Atom } = Caller, E) when is_atom(Atom) ->
- { Caller, E };
-expand({ '__ENV__', Meta, Atom }, E) when is_atom(Atom) ->
- Env = elixir_env:env_to_ex({ ?line(Meta), E }),
- { { '{}', [], tuple_to_list(Env) }, E };
-expand({ { '.', DotMeta, [{ '__ENV__', Meta, Atom }, Field] }, CallMeta, [] }, E) when is_atom(Atom), is_atom(Field) ->
- Env = elixir_env:env_to_ex({ ?line(Meta), E }),
+expand({'__MODULE__', _, Atom}, E) when is_atom(Atom) ->
+ {E#elixir_env.module, E};
+expand({'__DIR__', _, Atom}, E) when is_atom(Atom) ->
+ {filename:dirname(E#elixir_env.file), E};
+expand({'__CALLER__', _, Atom} = Caller, E) when is_atom(Atom) ->
+ {Caller, E};
+expand({'__ENV__', Meta, Atom}, E) when is_atom(Atom) ->
+ Env = elixir_env:env_to_ex({?line(Meta), E}),
+ {{'{}', [], tuple_to_list(Env)}, E};
+expand({{'.', DotMeta, [{'__ENV__', Meta, Atom}, Field]}, CallMeta, []}, E) when is_atom(Atom), is_atom(Field) ->
+ Env = elixir_env:env_to_ex({?line(Meta), E}),
case erlang:function_exported('Elixir.Macro.Env', Field, 1) of
- true -> { Env:Field(), E };
- false -> { { { '.', DotMeta, [{ '{}', [], tuple_to_list(Env) }, Field] }, CallMeta, [] }, E }
+ true -> {Env:Field(), E};
+ false -> {{{'.', DotMeta, [{'{}', [], tuple_to_list(Env)}, Field]}, CallMeta, []}, E}
end;
%% Quote
-expand({ Unquote, Meta, [_] }, E) when Unquote == unquote; Unquote == unquote_splicing ->
+expand({Unquote, Meta, [_]}, E) when Unquote == unquote; Unquote == unquote_splicing ->
compile_error(Meta, E#elixir_env.file, "~p called outside quote", [Unquote]);
-expand({ quote, Meta, [Opts] }, E) when is_list(Opts) ->
+expand({quote, Meta, [Opts]}, E) when is_list(Opts) ->
case lists:keyfind(do, 1, Opts) of
- { do, Do } ->
- expand({ quote, Meta, [lists:keydelete(do, 1, Opts), [{do,Do}]] }, E);
+ {do, Do} ->
+ expand({quote, Meta, [lists:keydelete(do, 1, Opts), [{do,Do}]]}, E);
false ->
compile_error(Meta, E#elixir_env.file, "missing do keyword in quote")
end;
-expand({ quote, Meta, [_] }, E) ->
+expand({quote, Meta, [_]}, E) ->
compile_error(Meta, E#elixir_env.file, "invalid arguments for quote");
-expand({ quote, Meta, [KV, Do] }, E) when is_list(Do) ->
+expand({quote, Meta, [KV, Do]}, E) when is_list(Do) ->
Exprs =
case lists:keyfind(do, 1, Do) of
- { do, Expr } -> Expr;
+ {do, Expr} -> Expr;
false -> compile_error(Meta, E#elixir_scope.file, "missing do keyword in quote")
end,
ValidOpts = [hygiene, context, var_context, location, line, unquote, bind_quoted],
- { EKV, ET } = expand_opts(Meta, quote, ValidOpts, KV, E),
+ {EKV, ET} = expand_opts(Meta, quote, ValidOpts, KV, E),
Hygiene = case lists:keyfind(hygiene, 1, EKV) of
- { hygiene, List } when is_list(List) ->
+ {hygiene, List} when is_list(List) ->
List;
false ->
[]
end,
Context = case lists:keyfind(context, 1, EKV) of
- { context, Atom } when is_atom(Atom) ->
+ {context, Atom} when is_atom(Atom) ->
Atom;
- { context, Ctx } ->
+ {context, Ctx} ->
compile_error(Meta, E#elixir_env.file, "invalid :context for quote, "
"expected a compile time atom or alias, got: ~ts", ['Elixir.Kernel':inspect(Ctx)]);
false ->
@@ -192,46 +192,46 @@ expand({ quote, Meta, [KV, Do] }, E) when is_list(Do) ->
end
end,
- Vars = lists:keyfind(vars, 1, Hygiene) /= { vars, false },
- Aliases = lists:keyfind(aliases, 1, Hygiene) /= { aliases, false },
- Imports = lists:keyfind(imports, 1, Hygiene) /= { imports, false },
+ Vars = lists:keyfind(vars, 1, Hygiene) /= {vars, false},
+ Aliases = lists:keyfind(aliases, 1, Hygiene) /= {aliases, false},
+ Imports = lists:keyfind(imports, 1, Hygiene) /= {imports, false},
- Keep = lists:keyfind(location, 1, EKV) == { location, keep },
+ Keep = lists:keyfind(location, 1, EKV) == {location, keep},
Line = proplists:get_value(line, EKV, false),
- { Binding, DefaultUnquote } = case lists:keyfind(bind_quoted, 1, EKV) of
- { bind_quoted, BQ } -> { BQ, false };
- false -> { nil, true }
+ {Binding, DefaultUnquote} = case lists:keyfind(bind_quoted, 1, EKV) of
+ {bind_quoted, BQ} -> {BQ, false};
+ false -> {nil, true}
end,
Unquote = case lists:keyfind(unquote, 1, EKV) of
- { unquote, Bool } when is_boolean(Bool) -> Bool;
+ {unquote, Bool} when is_boolean(Bool) -> Bool;
false -> DefaultUnquote
end,
Q = #elixir_quote{vars_hygiene=Vars, line=Line, keep=Keep, unquote=Unquote,
aliases_hygiene=Aliases, imports_hygiene=Imports, context=Context},
- { Quoted, _Q } = elixir_quote:quote(Exprs, Binding, Q, ET),
+ {Quoted, _Q} = elixir_quote:quote(Exprs, Binding, Q, ET),
expand(Quoted, ET);
-expand({ quote, Meta, [_, _] }, E) ->
+expand({quote, Meta, [_, _]}, E) ->
compile_error(Meta, E#elixir_env.file, "invalid arguments for quote");
%% Functions
-expand({ '&', _, [Arg] } = Original, E) when is_integer(Arg) ->
- { Original, E };
-expand({ '&', Meta, [Arg] }, E) ->
+expand({'&', _, [Arg]} = Original, E) when is_integer(Arg) ->
+ {Original, E};
+expand({'&', Meta, [Arg]}, E) ->
assert_no_match_or_guard_scope(Meta, '&', E),
case elixir_fn:capture(Meta, Arg, E) of
- { local, Fun, Arity } ->
- { { '&', Meta, [{ '/', [], [{ Fun, [], nil }, Arity] }] }, E };
- { expanded, Expr, EE } ->
+ {local, Fun, Arity} ->
+ {{'&', Meta, [{'/', [], [{Fun, [], nil}, Arity]}]}, E};
+ {expanded, Expr, EE} ->
expand(Expr, EE)
end;
-expand({ fn, Meta, Pairs }, E) ->
+expand({fn, Meta, Pairs}, E) ->
assert_no_match_or_guard_scope(Meta, fn, E),
elixir_fn:expand(Meta, Pairs, E);
@@ -239,67 +239,67 @@ expand({ fn, Meta, Pairs }, E) ->
expand({'case', Meta, [Expr, KV]}, E) ->
assert_no_match_or_guard_scope(Meta, 'case', E),
- { EExpr, EE } = expand(Expr, E),
- { EClauses, EC } = elixir_exp_clauses:'case'(Meta, KV, EE),
- { { 'case', Meta, [EExpr, EClauses] }, EC };
+ {EExpr, EE} = expand(Expr, E),
+ {EClauses, EC} = elixir_exp_clauses:'case'(Meta, KV, EE),
+ {{'case', Meta, [EExpr, EClauses]}, EC};
expand({'receive', Meta, [KV]}, E) ->
assert_no_match_or_guard_scope(Meta, 'receive', E),
- { EClauses, EC } = elixir_exp_clauses:'receive'(Meta, KV, E),
- { { 'receive', Meta, [EClauses] }, EC };
+ {EClauses, EC} = elixir_exp_clauses:'receive'(Meta, KV, E),
+ {{'receive', Meta, [EClauses]}, EC};
expand({'try', Meta, [KV]}, E) ->
assert_no_match_or_guard_scope(Meta, 'try', E),
- { EClauses, EC } = elixir_exp_clauses:'try'(Meta, KV, E),
- { { 'try', Meta, [EClauses] }, EC };
+ {EClauses, EC} = elixir_exp_clauses:'try'(Meta, KV, E),
+ {{'try', Meta, [EClauses]}, EC};
%% Comprehensions
-expand({ Kind, Meta, Args }, E) when is_list(Args), (Kind == lc) orelse (Kind == bc) ->
+expand({Kind, Meta, Args}, E) when is_list(Args), (Kind == lc) orelse (Kind == bc) ->
expand_comprehension(Meta, Kind, Args, E);
-expand({ for, Meta, Args }, E) when is_list(Args) ->
+expand({for, Meta, Args}, E) when is_list(Args) ->
elixir_for:expand(Meta, Args, E);
%% Super
-expand({ super, Meta, Args }, E) when is_list(Args) ->
+expand({super, Meta, Args}, E) when is_list(Args) ->
assert_no_match_or_guard_scope(Meta, super, E),
- { EArgs, EA } = expand_args(Args, E),
- { { super, Meta, EArgs }, EA };
+ {EArgs, EA} = expand_args(Args, E),
+ {{super, Meta, EArgs}, EA};
%% Vars
-expand({ '^', Meta, [Arg] }, #elixir_env{context=match} = E) ->
+expand({'^', Meta, [Arg]}, #elixir_env{context=match} = E) ->
case expand(Arg, E) of
- { { Name, _, Kind } = EArg, EA } when is_atom(Name), is_atom(Kind) ->
- { { '^', Meta, [EArg] }, EA };
+ {{Name, _, Kind} = EArg, EA} when is_atom(Name), is_atom(Kind) ->
+ {{'^', Meta, [EArg]}, EA};
_ ->
Msg = "invalid argument for unary operator ^, expected an existing variable, got: ^~ts",
compile_error(Meta, E#elixir_env.file, Msg, ['Elixir.Macro':to_string(Arg)])
end;
-expand({ '^', Meta, [Arg] }, E) ->
+expand({'^', Meta, [Arg]}, E) ->
compile_error(Meta, E#elixir_env.file,
"cannot use ^~ts outside of match clauses", ['Elixir.Macro':to_string(Arg)]);
-expand({ '_', _, Kind } = Var, E) when is_atom(Kind) ->
- { Var, E };
-expand({ Name, Meta, Kind } = Var, #elixir_env{context=match, export_vars=Export} = E) when is_atom(Name), is_atom(Kind) ->
- Pair = { Name, var_kind(Meta, Kind) },
+expand({'_', _, Kind} = Var, E) when is_atom(Kind) ->
+ {Var, E};
+expand({Name, Meta, Kind} = Var, #elixir_env{context=match, export_vars=Export} = E) when is_atom(Name), is_atom(Kind) ->
+ Pair = {Name, var_kind(Meta, Kind)},
NewVars = ordsets:add_element(Pair, E#elixir_env.vars),
- NewExport = case (Export /= nil) andalso (lists:keyfind(export, 1, Meta) /= { export, false }) of
+ NewExport = case (Export /= nil) andalso (lists:keyfind(export, 1, Meta) /= {export, false}) of
true -> ordsets:add_element(Pair, Export);
false -> Export
end,
- { Var, E#elixir_env{vars=NewVars, export_vars=NewExport} };
-expand({ Name, Meta, Kind } = Var, #elixir_env{vars=Vars} = E) when is_atom(Name), is_atom(Kind) ->
- case lists:member({ Name, var_kind(Meta, Kind) }, Vars) of
+ {Var, E#elixir_env{vars=NewVars, export_vars=NewExport}};
+expand({Name, Meta, Kind} = Var, #elixir_env{vars=Vars} = E) when is_atom(Name), is_atom(Kind) ->
+ case lists:member({Name, var_kind(Meta, Kind)}, Vars) of
true ->
- { Var, E };
+ {Var, E};
false ->
VarMeta = lists:keyfind(var, 1, Meta),
if
- VarMeta == { var, true } ->
+ VarMeta == {var, true} ->
Extra = case Kind of
nil -> "";
_ -> io_lib:format(" (context ~ts)", [elixir_aliases:inspect(Kind)])
@@ -308,13 +308,13 @@ expand({ Name, Meta, Kind } = Var, #elixir_env{vars=Vars} = E) when is_atom(Name
compile_error(Meta, E#elixir_env.file, "expected var ~ts~ts to expand to an existing "
"variable or be a part of a match", [Name, Extra]);
true ->
- expand({ Name, Meta, [] }, E)
+ expand({Name, Meta, []}, E)
end
end;
%% Local calls
-expand({ Atom, Meta, Args }, E) when is_atom(Atom), is_list(Meta), is_list(Args) ->
+expand({Atom, Meta, Args}, E) when is_atom(Atom), is_list(Meta), is_list(Args) ->
assert_no_ambiguous_op(Atom, Meta, Args, E),
elixir_dispatch:dispatch_import(Meta, Atom, Args, E, fun() ->
@@ -323,9 +323,9 @@ expand({ Atom, Meta, Args }, E) when is_atom(Atom), is_list(Meta), is_list(Args)
%% Remote calls
-expand({ { '.', DotMeta, [Left, Right] }, Meta, Args }, E)
+expand({{'.', DotMeta, [Left, Right]}, Meta, Args}, E)
when (is_tuple(Left) orelse is_atom(Left)), is_atom(Right), is_list(Meta), is_list(Args) ->
- { ELeft, EL } = expand(Left, E),
+ {ELeft, EL} = expand(Left, E),
elixir_dispatch:dispatch_require(Meta, ELeft, Right, Args, EL, fun(AR, AF, AA) ->
expand_remote(AR, DotMeta, AF, Meta, AA, E, EL)
@@ -333,51 +333,51 @@ expand({ { '.', DotMeta, [Left, Right] }, Meta, Args }, E)
%% Anonymous calls
-expand({ { '.', DotMeta, [Expr] }, Meta, Args }, E) when is_list(Args) ->
- { EExpr, EE } = expand(Expr, E),
+expand({{'.', DotMeta, [Expr]}, Meta, Args}, E) when is_list(Args) ->
+ {EExpr, EE} = expand(Expr, E),
if
is_atom(EExpr) ->
compile_error(Meta, E#elixir_env.file, "invalid function call :~ts.()", [EExpr]);
true ->
- { EArgs, EA } = expand_args(Args, elixir_env:mergea(E, EE)),
- { { { '.', DotMeta, [EExpr] }, Meta, EArgs }, elixir_env:mergev(EE, EA) }
+ {EArgs, EA} = expand_args(Args, elixir_env:mergea(E, EE)),
+ {{{'.', DotMeta, [EExpr]}, Meta, EArgs}, elixir_env:mergev(EE, EA)}
end;
%% Invalid calls
-expand({ _, Meta, Args } = Invalid, E) when is_list(Meta) and is_list(Args) ->
+expand({_, Meta, Args} = Invalid, E) when is_list(Meta) and is_list(Args) ->
compile_error(Meta, E#elixir_env.file, "invalid call ~ts",
['Elixir.Macro':to_string(Invalid)]);
-expand({ _, _, _ } = Tuple, E) ->
+expand({_, _, _} = Tuple, E) ->
compile_error([{line,0}], E#elixir_env.file, "invalid quoted expression: ~ts",
['Elixir.Kernel':inspect(Tuple, [{records,false}])]);
%% Literals
-expand({ Left, Right }, E) ->
- { [ELeft, ERight], EE } = expand_args([Left, Right], E),
- { { ELeft, ERight }, EE };
+expand({Left, Right}, E) ->
+ {[ELeft, ERight], EE} = expand_args([Left, Right], E),
+ {{ELeft, ERight}, EE};
expand(List, #elixir_env{context=match} = E) when is_list(List) ->
expand_list(List, fun expand/2, E, []);
expand(List, E) when is_list(List) ->
- { EArgs, { EC, EV } } = expand_list(List, fun expand_arg/2, {E, E}, []),
- { EArgs, elixir_env:mergea(EV, EC) };
+ {EArgs, {EC, EV}} = expand_list(List, fun expand_arg/2, {E, E}, []),
+ {EArgs, elixir_env:mergea(EV, EC)};
expand(Function, E) when is_function(Function) ->
- case (erlang:fun_info(Function, type) == { type, external }) andalso
- (erlang:fun_info(Function, env) == { env, [] }) of
+ case (erlang:fun_info(Function, type) == {type, external}) andalso
+ (erlang:fun_info(Function, env) == {env, []}) of
true ->
- { Function, E };
+ {Function, E};
false ->
compile_error([{line,0}], E#elixir_env.file,
"invalid quoted expression: ~ts", ['Elixir.Kernel':inspect(Function)])
end;
expand(Other, E) when is_number(Other); is_atom(Other); is_binary(Other); is_pid(Other) ->
- { Other, E };
+ {Other, E};
expand(Other, E) ->
compile_error([{line,0}], E#elixir_env.file,
@@ -385,14 +385,14 @@ expand(Other, E) ->
%% Helpers
-expand_list([{ '|', Meta, [_, _] = Args }], Fun, Acc, List) ->
- { EArgs, EAcc } = lists:mapfoldl(Fun, Acc, Args),
- expand_list([], Fun, EAcc, [{ '|', Meta, EArgs }|List]);
+expand_list([{'|', Meta, [_, _] = Args}], Fun, Acc, List) ->
+ {EArgs, EAcc} = lists:mapfoldl(Fun, Acc, Args),
+ expand_list([], Fun, EAcc, [{'|', Meta, EArgs}|List]);
expand_list([H|T], Fun, Acc, List) ->
- { EArg, EAcc } = Fun(H, Acc),
+ {EArg, EAcc} = Fun(H, Acc),
expand_list(T, Fun, EAcc, [EArg|List]);
expand_list([], _Fun, Acc, List) ->
- { lists:reverse(List), Acc }.
+ {lists:reverse(List), Acc}.
expand_many(Args, E) ->
lists:mapfoldl(fun expand/2, E, Args).
@@ -411,22 +411,22 @@ expand_many(Args, E) ->
%%
%% However, lexical information is.
expand_arg(Arg, Acc) when is_number(Arg); is_atom(Arg); is_binary(Arg); is_pid(Arg) ->
- { Arg, Acc };
-expand_arg(Arg, { Acc1, Acc2 }) ->
- { EArg, EAcc } = expand(Arg, Acc1),
- { EArg, { elixir_env:mergea(Acc1, EAcc), elixir_env:mergev(Acc2, EAcc) } }.
+ {Arg, Acc};
+expand_arg(Arg, {Acc1, Acc2}) ->
+ {EArg, EAcc} = expand(Arg, Acc1),
+ {EArg, {elixir_env:mergea(Acc1, EAcc), elixir_env:mergev(Acc2, EAcc)}}.
expand_args(Args, #elixir_env{context=match} = E) ->
expand_many(Args, E);
expand_args(Args, E) ->
- { EArgs, { EC, EV } } = lists:mapfoldl(fun expand_arg/2, {E, E}, Args),
- { EArgs, elixir_env:mergea(EV, EC) }.
+ {EArgs, {EC, EV}} = lists:mapfoldl(fun expand_arg/2, {E, E}, Args),
+ {EArgs, elixir_env:mergea(EV, EC)}.
%% Match/var helpers
var_kind(Meta, Kind) ->
case lists:keyfind(counter, 1, Meta) of
- { counter, Counter } -> Counter;
+ {counter, Counter} -> Counter;
false -> Kind
end.
@@ -434,8 +434,8 @@ var_kind(Meta, Kind) ->
assert_no_ambiguous_op(Name, Meta, [Arg], E) ->
case lists:keyfind(ambiguous_op, 1, Meta) of
- { ambiguous_op, Kind } ->
- case lists:member({ Name, Kind }, E#elixir_env.vars) of
+ {ambiguous_op, Kind} ->
+ case lists:member({Name, Kind}, E#elixir_env.vars) of
true ->
compile_error(Meta, E#elixir_env.file, "\"~ts ~ts\" looks like a function call but "
"there is a variable named \"~ts\", please use explicit parenthesis or even spaces",
@@ -450,14 +450,14 @@ assert_no_ambiguous_op(_Atom, _Meta, _Args, _E) ->
ok.
expand_local(Meta, Name, Args, #elixir_env{local=nil, function=nil} = E) ->
- { EArgs, EA } = expand_args(Args, E),
- { { Name, Meta, EArgs }, EA };
+ {EArgs, EA} = expand_args(Args, E),
+ {{Name, Meta, EArgs}, EA};
expand_local(Meta, Name, Args, #elixir_env{local=nil, module=Module, function=Function} = E) ->
- elixir_locals:record_local({ Name, length(Args) }, Module, Function),
- { EArgs, EA } = expand_args(Args, E),
- { { Name, Meta, EArgs }, EA };
+ elixir_locals:record_local({Name, length(Args)}, Module, Function),
+ {EArgs, EA} = expand_args(Args, E),
+ {{Name, Meta, EArgs}, EA};
expand_local(Meta, Name, Args, E) ->
- expand({ { '.', Meta, [E#elixir_env.local, Name] }, Meta, Args }, E).
+ expand({{'.', Meta, [E#elixir_env.local, Name]}, Meta, Args}, E).
%% Remote
@@ -466,34 +466,34 @@ expand_remote(Receiver, DotMeta, Right, Meta, Args, E, EL) ->
is_atom(Receiver) -> elixir_lexical:record_remote(Receiver, E#elixir_env.lexical_tracker);
true -> ok
end,
- { EArgs, EA } = expand_args(Args, E),
- { { { '.', DotMeta, [Receiver, Right] }, Meta, EArgs }, elixir_env:mergev(EL, EA) }.
+ {EArgs, EA} = expand_args(Args, E),
+ {{{'.', DotMeta, [Receiver, Right]}, Meta, EArgs}, elixir_env:mergev(EL, EA)}.
%% Lexical helpers
expand_opts(Meta, Kind, Allowed, Opts, E) ->
- { EOpts, EE } = expand(Opts, E),
+ {EOpts, EE} = expand(Opts, E),
validate_opts(Meta, Kind, Allowed, EOpts, EE),
- { EOpts, EE }.
+ {EOpts, EE}.
validate_opts(Meta, Kind, Allowed, Opts, E) when is_list(Opts) ->
[begin
compile_error(Meta, E#elixir_env.file,
"unsupported option ~ts given to ~s", ['Elixir.Kernel':inspect(Key), Kind])
- end || { Key, _ } <- Opts, not lists:member(Key, Allowed)];
+ end || {Key, _} <- Opts, not lists:member(Key, Allowed)];
validate_opts(Meta, Kind, _Allowed, _Opts, S) ->
compile_error(Meta, S#elixir_scope.file, "invalid options for ~s, expected a keyword list", [Kind]).
no_alias_opts(KV) when is_list(KV) ->
case lists:keyfind(as, 1, KV) of
- { as, As } -> lists:keystore(as, 1, KV, { as, no_alias_expansion(As) });
+ {as, As} -> lists:keystore(as, 1, KV, {as, no_alias_expansion(As)});
false -> KV
end;
no_alias_opts(KV) -> KV.
-no_alias_expansion({ '__aliases__', Meta, [H|T] }) when (H /= 'Elixir') and is_atom(H) ->
- { '__aliases__', Meta, ['Elixir',H|T] };
+no_alias_expansion({'__aliases__', Meta, [H|T]}) when (H /= 'Elixir') and is_atom(H) ->
+ {'__aliases__', Meta, ['Elixir',H|T]};
no_alias_expansion(Other) ->
Other.
@@ -509,20 +509,20 @@ expand_alias(Meta, IncludeByDefault, Ref, KV, #elixir_env{context_modules=Contex
%% module in context modules.
NewContext =
case lists:keyfind(defined, 1, Meta) of
- { defined, Mod } when is_atom(Mod) -> [Mod|Context];
+ {defined, Mod} when is_atom(Mod) -> [Mod|Context];
false -> Context
end,
- { Aliases, MacroAliases } = elixir_aliases:store(Meta, New, Ref, KV, E#elixir_env.aliases,
+ {Aliases, MacroAliases} = elixir_aliases:store(Meta, New, Ref, KV, E#elixir_env.aliases,
E#elixir_env.macro_aliases, E#elixir_env.lexical_tracker),
E#elixir_env{aliases=Aliases, macro_aliases=MacroAliases, context_modules=NewContext}.
-expand_as({ as, true }, _Meta, _IncludeByDefault, Ref, _E) ->
+expand_as({as, true}, _Meta, _IncludeByDefault, Ref, _E) ->
elixir_aliases:last(Ref);
-expand_as({ as, false }, _Meta, _IncludeByDefault, Ref, _E) ->
+expand_as({as, false}, _Meta, _IncludeByDefault, Ref, _E) ->
Ref;
-expand_as({ as, Atom }, Meta, _IncludeByDefault, _Ref, E) when is_atom(Atom) ->
+expand_as({as, Atom}, Meta, _IncludeByDefault, _Ref, E) when is_atom(Atom) ->
case length(string:tokens(atom_to_list(Atom), ".")) of
1 -> compile_error(Meta, E#elixir_env.file,
"invalid value for keyword :as, expected an alias, got atom: ~ts", [elixir_aliases:inspect(Atom)]);
@@ -534,7 +534,7 @@ expand_as(false, _Meta, IncludeByDefault, Ref, _E) ->
if IncludeByDefault -> elixir_aliases:last(Ref);
true -> Ref
end;
-expand_as({ as, Other }, Meta, _IncludeByDefault, _Ref, E) ->
+expand_as({as, Other}, Meta, _IncludeByDefault, _Ref, E) ->
compile_error(Meta, E#elixir_env.file,
"invalid value for keyword :as, expected an alias, got: ~ts", ['Elixir.Macro':to_string(Other)]).
@@ -542,18 +542,18 @@ expand_as({ as, Other }, Meta, _IncludeByDefault, _Ref, E) ->
expand_comprehension(Meta, Kind, Args, E) ->
case elixir_utils:split_last(Args) of
- { Cases, [{do,Expr}] } ->
- { ECases, EC } = lists:mapfoldl(fun expand_comprehension_clause/2, E, Cases),
- { EExpr, _ } = expand(Expr, EC),
- { { Kind, Meta, ECases ++ [[{do,EExpr}]] }, E };
+ {Cases, [{do,Expr}]} ->
+ {ECases, EC} = lists:mapfoldl(fun expand_comprehension_clause/2, E, Cases),
+ {EExpr, _} = expand(Expr, EC),
+ {{Kind, Meta, ECases ++ [[{do,EExpr}]]}, E};
_ ->
compile_error(Meta, E#elixir_env.file, "missing do keyword in comprehension ~ts", [Kind])
end.
expand_comprehension_clause({Gen, Meta, [Left, Right]}, E) when Gen == inbits; Gen == inlist ->
- { ERight, ER } = expand(Right, E),
- { ELeft, EL } = elixir_exp_clauses:match(fun expand/2, Left, E),
- { { Gen, Meta, [ELeft, ERight] }, elixir_env:mergev(EL, ER) };
+ {ERight, ER} = expand(Right, E),
+ {ELeft, EL} = elixir_exp_clauses:match(fun expand/2, Left, E),
+ {{Gen, Meta, [ELeft, ERight]}, elixir_env:mergev(EL, ER)};
expand_comprehension_clause(X, E) ->
expand(X, E).
diff --git a/lib/elixir/src/elixir_exp_clauses.erl b/lib/elixir/src/elixir_exp_clauses.erl
index c812b47bd..ce01d321f 100644
--- a/lib/elixir/src/elixir_exp_clauses.erl
+++ b/lib/elixir/src/elixir_exp_clauses.erl
@@ -6,36 +6,36 @@
-include("elixir.hrl").
match(Fun, Expr, #elixir_env{context=Context} = E) ->
- { EExpr, EE } = Fun(Expr, E#elixir_env{context=match}),
- { EExpr, EE#elixir_env{context=Context} }.
+ {EExpr, EE} = Fun(Expr, E#elixir_env{context=match}),
+ {EExpr, EE#elixir_env{context=Context}}.
def(Fun, Args, Guards, Body, E) ->
- { EArgs, EA } = match(Fun, Args, E),
- { EGuards, EG } = guard(Guards, EA#elixir_env{context=guard}),
- { EBody, EB } = elixir_exp:expand(Body, EG#elixir_env{context=E#elixir_env.context}),
- { EArgs, EGuards, EBody, EB }.
+ {EArgs, EA} = match(Fun, Args, E),
+ {EGuards, EG} = guard(Guards, EA#elixir_env{context=guard}),
+ {EBody, EB} = elixir_exp:expand(Body, EG#elixir_env{context=E#elixir_env.context}),
+ {EArgs, EGuards, EBody, EB}.
-clause(Meta, Kind, Fun, { '->', ClauseMeta, [_, _] } = Clause, E) when is_function(Fun, 3) ->
+clause(Meta, Kind, Fun, {'->', ClauseMeta, [_, _]} = Clause, E) when is_function(Fun, 3) ->
clause(Meta, Kind, fun(X, Acc) -> Fun(ClauseMeta, X, Acc) end, Clause, E);
-clause(_Meta, _Kind, Fun, { '->', Meta, [Left, Right] }, E) ->
- { ELeft, EL } = head(Fun, Left, E),
- { ERight, ER } = elixir_exp:expand(Right, EL),
- { { '->', Meta, [ELeft, ERight] }, ER };
+clause(_Meta, _Kind, Fun, {'->', Meta, [Left, Right]}, E) ->
+ {ELeft, EL} = head(Fun, Left, E),
+ {ERight, ER} = elixir_exp:expand(Right, EL),
+ {{'->', Meta, [ELeft, ERight]}, ER};
clause(Meta, Kind, _Fun, _, E) ->
compile_error(Meta, E#elixir_env.file, "expected -> clauses in ~ts", [Kind]).
-head(Fun, [{ 'when', Meta, [_,_|_] = All }], E) ->
- { Args, Guard } = elixir_utils:split_last(All),
- { EArgs, EA } = match(Fun, Args, E),
- { EGuard, EG } = guard(Guard, EA#elixir_env{context=guard}),
- { [{ 'when', Meta, EArgs ++ [EGuard] }], EG#elixir_env{context=E#elixir_env.context} };
+head(Fun, [{'when', Meta, [_,_|_] = All}], E) ->
+ {Args, Guard} = elixir_utils:split_last(All),
+ {EArgs, EA} = match(Fun, Args, E),
+ {EGuard, EG} = guard(Guard, EA#elixir_env{context=guard}),
+ {[{'when', Meta, EArgs ++ [EGuard]}], EG#elixir_env{context=E#elixir_env.context}};
head(Fun, Args, E) ->
match(Fun, Args, E).
-guard({ 'when', Meta, [Left, Right] }, E) ->
- { ELeft, EL } = guard(Left, E),
- { ERight, ER } = guard(Right, EL),
- { { 'when', Meta, [ELeft, ERight] }, ER };
+guard({'when', Meta, [Left, Right]}, E) ->
+ {ELeft, EL} = guard(Left, E),
+ {ERight, ER} = guard(Right, EL),
+ {{'when', Meta, [ELeft, ERight]}, ER};
guard(Other, E) ->
elixir_exp:expand(Other, E).
@@ -47,12 +47,12 @@ guard(Other, E) ->
compile_error(Meta, E#elixir_env.file, "invalid arguments for case");
'case'(Meta, KV, E) ->
EE = E#elixir_env{export_vars=[]},
- { EClauses, EVars } = lists:mapfoldl(fun(X, Acc) -> do_case(Meta, X, Acc, EE) end, [], KV),
- { EClauses, elixir_env:mergev(EVars, E) }.
+ {EClauses, EVars} = lists:mapfoldl(fun(X, Acc) -> do_case(Meta, X, Acc, EE) end, [], KV),
+ {EClauses, elixir_env:mergev(EVars, E)}.
-do_case(Meta, { 'do', _ } = Do, Acc, E) ->
+do_case(Meta, {'do', _} = Do, Acc, E) ->
expand_with_export(Meta, 'case', expand_arg(Meta, 'case', 'do'), Do, Acc, E);
-do_case(Meta, { Key, _ }, _Acc, E) ->
+do_case(Meta, {Key, _}, _Acc, E) ->
compile_error(Meta, E#elixir_env.file, "unexpected keyword ~ts in case", [Key]).
%% Receive
@@ -63,21 +63,21 @@ do_case(Meta, { Key, _ }, _Acc, E) ->
compile_error(Meta, E#elixir_env.file, "invalid arguments for receive");
'receive'(Meta, KV, E) ->
EE = E#elixir_env{export_vars=[]},
- { EClauses, EVars } = lists:mapfoldl(fun(X, Acc) -> do_receive(Meta, X, Acc, EE) end, [], KV),
- { EClauses, elixir_env:mergev(EVars, E) }.
+ {EClauses, EVars} = lists:mapfoldl(fun(X, Acc) -> do_receive(Meta, X, Acc, EE) end, [], KV),
+ {EClauses, elixir_env:mergev(EVars, E)}.
-do_receive(_Meta, { 'do', nil } = Do, Acc, _E) ->
- { Do, Acc };
-do_receive(Meta, { 'do', _ } = Do, Acc, E) ->
+do_receive(_Meta, {'do', nil} = Do, Acc, _E) ->
+ {Do, Acc};
+do_receive(Meta, {'do', _} = Do, Acc, E) ->
expand_with_export(Meta, 'receive', expand_arg(Meta, 'receive', 'do'), Do, Acc, E);
-do_receive(_Meta, { 'after', [{ '->', Meta, [[Left], Right] }] }, Acc, E) ->
- { ELeft, EL } = elixir_exp:expand(Left, E),
- { ERight, ER } = elixir_exp:expand(Right, EL),
- EClause = { 'after', [{ '->', Meta, [[ELeft], ERight] }] },
- { EClause, elixir_env:merge_vars(Acc, ER#elixir_env.export_vars) };
-do_receive(Meta, { 'after', _ }, _Acc, E) ->
+do_receive(_Meta, {'after', [{'->', Meta, [[Left], Right]}]}, Acc, E) ->
+ {ELeft, EL} = elixir_exp:expand(Left, E),
+ {ERight, ER} = elixir_exp:expand(Right, EL),
+ EClause = {'after', [{'->', Meta, [[ELeft], ERight]}]},
+ {EClause, elixir_env:merge_vars(Acc, ER#elixir_env.export_vars)};
+do_receive(Meta, {'after', _}, _Acc, E) ->
compile_error(Meta, E#elixir_env.file, "expected a single -> clause for after in receive");
-do_receive(Meta, { Key, _ }, _Acc, E) ->
+do_receive(Meta, {Key, _}, _Acc, E) ->
compile_error(Meta, E#elixir_env.file, "unexpected keyword ~ts in receive", [Key]).
%% Try
@@ -87,27 +87,27 @@ do_receive(Meta, { Key, _ }, _Acc, E) ->
'try'(Meta, KV, E) when not is_list(KV) ->
elixir_errors:compile_error(Meta, E#elixir_env.file, "invalid arguments for try");
'try'(Meta, KV, E) ->
- { lists:map(fun(X) -> do_try(Meta, X, E) end, KV), E }.
-
-do_try(_Meta, { 'do', Expr }, E) ->
- { EExpr, _ } = elixir_exp:expand(Expr, E),
- { 'do', EExpr };
-do_try(_Meta, { 'after', Expr }, E) ->
- { EExpr, _ } = elixir_exp:expand(Expr, E),
- { 'after', EExpr };
-do_try(Meta, { 'else', _ } = Else, E) ->
+ {lists:map(fun(X) -> do_try(Meta, X, E) end, KV), E}.
+
+do_try(_Meta, {'do', Expr}, E) ->
+ {EExpr, _} = elixir_exp:expand(Expr, E),
+ {'do', EExpr};
+do_try(_Meta, {'after', Expr}, E) ->
+ {EExpr, _} = elixir_exp:expand(Expr, E),
+ {'after', EExpr};
+do_try(Meta, {'else', _} = Else, E) ->
expand_without_export(Meta, 'try', expand_arg(Meta, 'try', 'else'), Else, E);
-do_try(Meta, { 'catch', _ } = Catch, E) ->
+do_try(Meta, {'catch', _} = Catch, E) ->
expand_without_export(Meta, 'try', fun elixir_exp:expand_args/2, Catch, E);
-do_try(Meta, { 'rescue', _ } = Rescue, E) ->
+do_try(Meta, {'rescue', _} = Rescue, E) ->
expand_without_export(Meta, 'try', fun expand_rescue/3, Rescue, E);
-do_try(Meta, { Key, _ }, E) ->
+do_try(Meta, {Key, _}, E) ->
compile_error(Meta, E#elixir_env.file, "unexpected keyword ~ts in try", [Key]).
expand_rescue(Meta, [Arg], E) ->
case expand_rescue(Arg, E) of
- { EArg, EA } ->
- { [EArg], EA };
+ {EArg, EA} ->
+ {[EArg], EA};
false ->
compile_error(Meta, E#elixir_env.file, "invalid rescue clause. The clause should "
"match on an alias, a variable or be in the `var in [alias]` format")
@@ -116,19 +116,19 @@ expand_rescue(Meta, _, E) ->
compile_error(Meta, E#elixir_env.file, "expected one arg for rescue clauses (->) in try").
%% rescue var => var in _
-expand_rescue({ Name, _, Atom } = Var, E) when is_atom(Name), is_atom(Atom) ->
- expand_rescue({ in, [], [Var, { '_', [], E#elixir_env.module }] }, E);
+expand_rescue({Name, _, Atom} = Var, E) when is_atom(Name), is_atom(Atom) ->
+ expand_rescue({in, [], [Var, {'_', [], E#elixir_env.module}]}, E);
%% rescue var in [Exprs]
-expand_rescue({ in, Meta, [Left, Right] }, E) ->
- { ERight, ER } = elixir_exp:expand(Right, E#elixir_env{context=nil}),
- { ELeft, EL } = elixir_exp:expand(Left, ER#elixir_env{context=match}),
+expand_rescue({in, Meta, [Left, Right]}, E) ->
+ {ERight, ER} = elixir_exp:expand(Right, E#elixir_env{context=nil}),
+ {ELeft, EL} = elixir_exp:expand(Left, ER#elixir_env{context=match}),
case ELeft of
- { Name, _, Atom } when is_atom(Name), is_atom(Atom) ->
+ {Name, _, Atom} when is_atom(Name), is_atom(Atom) ->
case normalize_rescue(ERight) of
false -> false;
- Other -> { { in, Meta, [ELeft, Other] }, EL }
+ Other -> {{in, Meta, [ELeft, Other]}, EL}
end;
_ ->
false
@@ -136,27 +136,27 @@ expand_rescue({ in, Meta, [Left, Right] }, E) ->
%% rescue Error => _ in [Error]
expand_rescue(Arg, E) ->
- expand_rescue({ in, [], [{ '_', [], E#elixir_env.module }, Arg] }, E).
+ expand_rescue({in, [], [{'_', [], E#elixir_env.module}, Arg]}, E).
-normalize_rescue({ '_', _, Atom } = N) when is_atom(Atom) -> N;
+normalize_rescue({'_', _, Atom} = N) when is_atom(Atom) -> N;
normalize_rescue(Atom) when is_atom(Atom) -> [Atom];
normalize_rescue(Other) ->
is_list(Other)
andalso lists:all(fun is_var_or_atom/1, Other)
andalso Other.
-is_var_or_atom({ Name, _, Atom }) when is_atom(Name), is_atom(Atom) -> true;
+is_var_or_atom({Name, _, Atom}) when is_atom(Name), is_atom(Atom) -> true;
is_var_or_atom(Atom) when is_atom(Atom) -> true;
is_var_or_atom(_) -> false.
%% Expansion helpers
-export_vars({ Left, Meta, Right }) when is_atom(Left), is_list(Meta), is_atom(Right) ->
- { Left, [{export,false}|Meta], Right };
-export_vars({ Left, Meta, Right }) ->
- { export_vars(Left), Meta, export_vars(Right) };
-export_vars({ Left, Right }) ->
- { export_vars(Left), export_vars(Right) };
+export_vars({Left, Meta, Right}) when is_atom(Left), is_list(Meta), is_atom(Right) ->
+ {Left, [{export,false}|Meta], Right};
+export_vars({Left, Meta, Right}) ->
+ {export_vars(Left), Meta, export_vars(Right)};
+export_vars({Left, Right}) ->
+ {export_vars(Left), export_vars(Right)};
export_vars(List) when is_list(List) ->
[export_vars(X) || X <- List];
export_vars(Other) ->
@@ -167,34 +167,34 @@ export_vars(Other) ->
expand_arg(Meta, Kind, Key) ->
fun
([Arg], E) ->
- { EArg, EA } = elixir_exp:expand(Arg, E),
- { [EArg], EA };
+ {EArg, EA} = elixir_exp:expand(Arg, E),
+ {[EArg], EA};
(_, E) ->
compile_error(Meta, E#elixir_env.file, "expected one arg for ~ts clauses (->) in ~ts", [Key, Kind])
end.
%% Expands all -> pairs in a given key keeping the overall vars.
-expand_with_export(Meta, Kind, Fun, { Key, Clauses }, Acc, E) when is_list(Clauses) ->
+expand_with_export(Meta, Kind, Fun, {Key, Clauses}, Acc, E) when is_list(Clauses) ->
EFun =
case lists:keyfind(export_head, 1, Meta) of
- { export_head, true } -> Fun;
+ {export_head, true} -> Fun;
_ -> fun(ExportArgs, ExportE) -> Fun(export_vars(ExportArgs), ExportE) end
end,
Transformer = fun(Clause, Vars) ->
- { EClause, EC } = clause(Meta, Kind, EFun, Clause, E),
- { EClause, elixir_env:merge_vars(Vars, EC#elixir_env.export_vars) }
+ {EClause, EC} = clause(Meta, Kind, EFun, Clause, E),
+ {EClause, elixir_env:merge_vars(Vars, EC#elixir_env.export_vars)}
end,
- { EClauses, EVars } = lists:mapfoldl(Transformer, Acc, Clauses),
- { { Key, EClauses }, EVars };
-expand_with_export(Meta, Kind, _Fun, { Key, _ }, _Acc, E) ->
+ {EClauses, EVars} = lists:mapfoldl(Transformer, Acc, Clauses),
+ {{Key, EClauses}, EVars};
+expand_with_export(Meta, Kind, _Fun, {Key, _}, _Acc, E) ->
compile_error(Meta, E#elixir_env.file, "expected -> clauses for ~ts in ~ts", [Key, Kind]).
%% Expands all -> pairs in a given key but do not keep the overall vars.
-expand_without_export(Meta, Kind, Fun, { Key, Clauses }, E) when is_list(Clauses) ->
+expand_without_export(Meta, Kind, Fun, {Key, Clauses}, E) when is_list(Clauses) ->
Transformer = fun(Clause) ->
- { EClause, _ } = clause(Meta, Kind, Fun, Clause, E),
+ {EClause, _} = clause(Meta, Kind, Fun, Clause, E),
EClause
end,
- { Key, lists:map(Transformer, Clauses) };
-expand_without_export(Meta, Kind, _Fun, { Key, _ }, E) ->
+ {Key, lists:map(Transformer, Clauses)};
+expand_without_export(Meta, Kind, _Fun, {Key, _}, E) ->
compile_error(Meta, E#elixir_env.file, "expected -> clauses for ~ts in ~ts", [Key, Kind]).
diff --git a/lib/elixir/src/elixir_fn.erl b/lib/elixir/src/elixir_fn.erl
index 482e59027..b5f1527f4 100644
--- a/lib/elixir/src/elixir_fn.erl
+++ b/lib/elixir/src/elixir_fn.erl
@@ -4,74 +4,74 @@
-include("elixir.hrl").
translate(Meta, Clauses, S) ->
- Transformer = fun({ '->', CMeta, [ArgsWithGuards, Expr] }, Acc) ->
- { Args, Guards } = elixir_clauses:extract_splat_guards(ArgsWithGuards),
- { TClause, TS } = elixir_clauses:clause(?line(CMeta), fun translate_fn_match/2,
+ Transformer = fun({'->', CMeta, [ArgsWithGuards, Expr]}, Acc) ->
+ {Args, Guards} = elixir_clauses:extract_splat_guards(ArgsWithGuards),
+ {TClause, TS} = elixir_clauses:clause(?line(CMeta), fun translate_fn_match/2,
Args, Expr, Guards, true, Acc),
- { TClause, elixir_scope:mergef(S, TS) }
+ {TClause, elixir_scope:mergef(S, TS)}
end,
- { TClauses, NS } = lists:mapfoldl(Transformer, S, Clauses),
- Arities = [length(Args) || { clause, _Line, Args, _Guards, _Exprs } <- TClauses],
+ {TClauses, NS} = lists:mapfoldl(Transformer, S, Clauses),
+ Arities = [length(Args) || {clause, _Line, Args, _Guards, _Exprs} <- TClauses],
case lists:usort(Arities) of
[_] ->
- { { 'fun', ?line(Meta), { clauses, TClauses } }, NS };
+ {{'fun', ?line(Meta), {clauses, TClauses}}, NS};
_ ->
compile_error(Meta, S#elixir_scope.file,
"cannot mix clauses with different arities in function definition")
end.
translate_fn_match(Arg, S) ->
- { TArg, TS } = elixir_translator:translate_args(Arg, S#elixir_scope{backup_vars=orddict:new()}),
- { TArg, TS#elixir_scope{backup_vars=S#elixir_scope.backup_vars} }.
+ {TArg, TS} = elixir_translator:translate_args(Arg, S#elixir_scope{backup_vars=orddict:new()}),
+ {TArg, TS#elixir_scope{backup_vars=S#elixir_scope.backup_vars}}.
%% Expansion
expand(Meta, Clauses, E) when is_list(Clauses) ->
Transformer = fun(Clause) ->
- { EClause, _ } = elixir_exp_clauses:clause(Meta, fn, fun elixir_exp:expand_args/2, Clause, E),
+ {EClause, _} = elixir_exp_clauses:clause(Meta, fn, fun elixir_exp:expand_args/2, Clause, E),
EClause
end,
- { { fn, Meta, lists:map(Transformer, Clauses) }, E }.
+ {{fn, Meta, lists:map(Transformer, Clauses)}, E}.
%% Capture
-capture(Meta, { '/', _, [{ { '.', _, [_, F] } = Dot, RequireMeta , [] }, A] }, E) when is_atom(F), is_integer(A) ->
- Args = [{ '&', [], [X] } || X <- lists:seq(1, A)],
- capture_require(Meta, { Dot, RequireMeta, Args }, E, true);
+capture(Meta, {'/', _, [{{'.', _, [_, F]} = Dot, RequireMeta , []}, A]}, E) when is_atom(F), is_integer(A) ->
+ Args = [{'&', [], [X]} || X <- lists:seq(1, A)],
+ capture_require(Meta, {Dot, RequireMeta, Args}, E, true);
-capture(Meta, { '/', _, [{ F, _, C }, A] }, E) when is_atom(F), is_integer(A), is_atom(C) ->
+capture(Meta, {'/', _, [{F, _, C}, A]}, E) when is_atom(F), is_integer(A), is_atom(C) ->
ImportMeta =
case lists:keyfind(import_fa, 1, Meta) of
- { import_fa, { Receiver, Context } } ->
+ {import_fa, {Receiver, Context}} ->
lists:keystore(context, 1,
- lists:keystore(import, 1, Meta, { import, Receiver }),
- { context, Context }
+ lists:keystore(import, 1, Meta, {import, Receiver}),
+ {context, Context}
);
false -> Meta
end,
- Args = [{ '&', [], [X] } || X <- lists:seq(1, A)],
- capture_import(Meta, { F, ImportMeta, Args }, E, true);
+ Args = [{'&', [], [X]} || X <- lists:seq(1, A)],
+ capture_import(Meta, {F, ImportMeta, Args}, E, true);
-capture(Meta, { { '.', _, [_, Fun] }, _, Args } = Expr, E) when is_atom(Fun), is_list(Args) ->
+capture(Meta, {{'.', _, [_, Fun]}, _, Args} = Expr, E) when is_atom(Fun), is_list(Args) ->
capture_require(Meta, Expr, E, is_sequential_and_not_empty(Args));
-capture(Meta, { { '.', _, [_] }, _, Args } = Expr, E) when is_list(Args) ->
+capture(Meta, {{'.', _, [_]}, _, Args} = Expr, E) when is_list(Args) ->
do_capture(Meta, Expr, E, false);
-capture(Meta, { '__block__', _, [Expr] }, E) ->
+capture(Meta, {'__block__', _, [Expr]}, E) ->
capture(Meta, Expr, E);
-capture(Meta, { '__block__', _, _ } = Expr, E) ->
+capture(Meta, {'__block__', _, _} = Expr, E) ->
Message = "invalid args for &, block expressions are not allowed, got: ~ts",
compile_error(Meta, E#elixir_env.file, Message, ['Elixir.Macro':to_string(Expr)]);
-capture(Meta, { Atom, _, Args } = Expr, E) when is_atom(Atom), is_list(Args) ->
+capture(Meta, {Atom, _, Args} = Expr, E) when is_atom(Atom), is_list(Args) ->
capture_import(Meta, Expr, E, is_sequential_and_not_empty(Args));
-capture(Meta, { Left, Right }, E) ->
- capture(Meta, { '{}', Meta, [Left, Right] }, E);
+capture(Meta, {Left, Right}, E) ->
+ capture(Meta, {'{}', Meta, [Left, Right]}, E);
capture(Meta, List, E) when is_list(List) ->
do_capture(Meta, List, E, is_sequential_and_not_empty(List));
@@ -79,33 +79,33 @@ capture(Meta, List, E) when is_list(List) ->
capture(Meta, Arg, E) ->
invalid_capture(Meta, Arg, E).
-capture_import(Meta, { Atom, ImportMeta, Args } = Expr, E, Sequential) ->
+capture_import(Meta, {Atom, ImportMeta, Args} = Expr, E, Sequential) ->
Res = Sequential andalso
elixir_dispatch:import_function(ImportMeta, Atom, length(Args), E),
handle_capture(Res, Meta, Expr, E, Sequential).
-capture_require(Meta, { { '.', _, [Left, Right] }, RequireMeta, Args } = Expr, E, Sequential) ->
- { Mod, EE } = elixir_exp:expand(Left, E),
+capture_require(Meta, {{'.', _, [Left, Right]}, RequireMeta, Args} = Expr, E, Sequential) ->
+ {Mod, EE} = elixir_exp:expand(Left, E),
Res = Sequential andalso is_atom(Mod) andalso
elixir_dispatch:require_function(RequireMeta, Mod, Right, length(Args), EE),
handle_capture(Res, Meta, Expr, EE, Sequential).
-handle_capture({ local, Fun, Arity }, _Meta, _Expr, _E, _Sequential) ->
- { local, Fun, Arity };
-handle_capture({ remote, Receiver, Fun, Arity }, Meta, _Expr, E, _Sequential) ->
- Tree = { { '.', [], [erlang, make_fun] }, Meta, [Receiver, Fun, Arity] },
- { expanded, Tree, E };
+handle_capture({local, Fun, Arity}, _Meta, _Expr, _E, _Sequential) ->
+ {local, Fun, Arity};
+handle_capture({remote, Receiver, Fun, Arity}, Meta, _Expr, E, _Sequential) ->
+ Tree = {{'.', [], [erlang, make_fun]}, Meta, [Receiver, Fun, Arity]},
+ {expanded, Tree, E};
handle_capture(false, Meta, Expr, E, Sequential) ->
do_capture(Meta, Expr, E, Sequential).
do_capture(Meta, Expr, E, Sequential) ->
case do_escape(Expr, elixir_counter:next(), E, []) of
- { _, [] } when not Sequential ->
+ {_, []} when not Sequential ->
invalid_capture(Meta, Expr, E);
- { EExpr, EDict } ->
+ {EExpr, EDict} ->
EVars = validate(Meta, EDict, 1, E),
- Fn = { fn, Meta, [{ '->', Meta, [EVars, EExpr]}]},
- { expanded, Fn, E }
+ Fn = {fn, Meta, [{'->', Meta, [EVars, EExpr]}]},
+ {expanded, Fn, E}
end.
invalid_capture(Meta, Arg, E) ->
@@ -113,46 +113,46 @@ invalid_capture(Meta, Arg, E) ->
"&local/arity or a capture containing at least one argument as &1, got: ~ts",
compile_error(Meta, E#elixir_env.file, Message, ['Elixir.Macro':to_string(Arg)]).
-validate(Meta, [{ Pos, Var }|T], Pos, E) ->
+validate(Meta, [{Pos, Var}|T], Pos, E) ->
[Var|validate(Meta, T, Pos + 1, E)];
-validate(Meta, [{ Pos, _ }|_], Expected, E) ->
+validate(Meta, [{Pos, _}|_], Expected, E) ->
compile_error(Meta, E#elixir_env.file, "capture &~B cannot be defined without &~B", [Pos, Expected]);
validate(_Meta, [], _Pos, _E) ->
[].
-do_escape({ '&', _, [Pos] }, Counter, _E, Dict) when is_integer(Pos), Pos > 0 ->
- Var = { list_to_atom([$x, $@+Pos]), [{ counter, Counter }], elixir_fn },
- { Var, orddict:store(Pos, Var, Dict) };
+do_escape({'&', _, [Pos]}, Counter, _E, Dict) when is_integer(Pos), Pos > 0 ->
+ Var = {list_to_atom([$x, $@+Pos]), [{counter, Counter}], elixir_fn},
+ {Var, orddict:store(Pos, Var, Dict)};
-do_escape({ '&', Meta, [Pos] }, _Counter, E, _Dict) when is_integer(Pos) ->
+do_escape({'&', Meta, [Pos]}, _Counter, E, _Dict) when is_integer(Pos) ->
compile_error(Meta, E#elixir_env.file, "capture &~B is not allowed", [Pos]);
-do_escape({ '&', Meta, _ } = Arg, _Counter, E, _Dict) ->
+do_escape({'&', Meta, _} = Arg, _Counter, E, _Dict) ->
Message = "nested captures via & are not allowed: ~ts",
compile_error(Meta, E#elixir_env.file, Message, ['Elixir.Macro':to_string(Arg)]);
-do_escape({ Left, Meta, Right }, Counter, E, Dict0) ->
- { TLeft, Dict1 } = do_escape(Left, Counter, E, Dict0),
- { TRight, Dict2 } = do_escape(Right, Counter, E, Dict1),
- { { TLeft, Meta, TRight }, Dict2 };
+do_escape({Left, Meta, Right}, Counter, E, Dict0) ->
+ {TLeft, Dict1} = do_escape(Left, Counter, E, Dict0),
+ {TRight, Dict2} = do_escape(Right, Counter, E, Dict1),
+ {{TLeft, Meta, TRight}, Dict2};
-do_escape({ Left, Right }, Counter, E, Dict0) ->
- { TLeft, Dict1 } = do_escape(Left, Counter, E, Dict0),
- { TRight, Dict2 } = do_escape(Right, Counter, E, Dict1),
- { { TLeft, TRight }, Dict2 };
+do_escape({Left, Right}, Counter, E, Dict0) ->
+ {TLeft, Dict1} = do_escape(Left, Counter, E, Dict0),
+ {TRight, Dict2} = do_escape(Right, Counter, E, Dict1),
+ {{TLeft, TRight}, Dict2};
do_escape(List, Counter, E, Dict) when is_list(List) ->
lists:mapfoldl(fun(X, Acc) -> do_escape(X, Counter, E, Acc) end, Dict, List);
do_escape(Other, _Counter, _E, Dict) ->
- { Other, Dict }.
+ {Other, Dict}.
is_sequential_and_not_empty([]) -> false;
is_sequential_and_not_empty(List) -> is_sequential(List, 1).
-is_sequential([{ '&', _, [Int] }|T], Int) ->
+is_sequential([{'&', _, [Int]}|T], Int) ->
is_sequential(T, Int + 1);
is_sequential([], _Int) -> true;
is_sequential(_, _Int) -> false.
diff --git a/lib/elixir/src/elixir_for.erl b/lib/elixir/src/elixir_for.erl
index 8a7e04375..312ab9ad3 100644
--- a/lib/elixir/src/elixir_for.erl
+++ b/lib/elixir/src/elixir_for.erl
@@ -5,42 +5,42 @@
%% Expansion
expand(Meta, Args, E) ->
- { Cases, Block } =
+ {Cases, Block} =
case elixir_utils:split_last(Args) of
- { OuterCases, OuterOpts } when is_list(OuterOpts) ->
+ {OuterCases, OuterOpts} when is_list(OuterOpts) ->
case elixir_utils:split_last(OuterCases) of
- { InnerCases, InnerOpts } when is_list(InnerOpts) ->
- { InnerCases, InnerOpts ++ OuterOpts };
+ {InnerCases, InnerOpts} when is_list(InnerOpts) ->
+ {InnerCases, InnerOpts ++ OuterOpts};
_ ->
- { OuterCases, OuterOpts }
+ {OuterCases, OuterOpts}
end;
_ ->
- { Args, [] }
+ {Args, []}
end,
- { Expr, Opts } =
+ {Expr, Opts} =
case lists:keyfind(do, 1, Block) of
- { do, Do } -> { Do, lists:keydelete(do, 1, Block) };
+ {do, Do} -> {Do, lists:keydelete(do, 1, Block)};
_ -> elixir_errors:compile_error(Meta, E#elixir_env.file,
"missing do keyword in for comprehension")
end,
- { EOpts, EO } = elixir_exp:expand(Opts, E),
- { ECases, EC } = lists:mapfoldl(fun expand/2, EO, Cases),
- { EExpr, _ } = elixir_exp:expand(Expr, EC),
- { { for, Meta, ECases ++ [[{do,EExpr}|EOpts]] }, E }.
+ {EOpts, EO} = elixir_exp:expand(Opts, E),
+ {ECases, EC} = lists:mapfoldl(fun expand/2, EO, Cases),
+ {EExpr, _} = elixir_exp:expand(Expr, EC),
+ {{for, Meta, ECases ++ [[{do,EExpr}|EOpts]]}, E}.
expand({'<-', Meta, [Left, Right]}, E) ->
- { ERight, ER } = elixir_exp:expand(Right, E),
- { ELeft, EL } = elixir_exp_clauses:match(fun elixir_exp:expand/2, Left, E),
- { { '<-', Meta, [ELeft, ERight] }, elixir_env:mergev(EL, ER) };
-expand({ '<<>>', Meta, Args } = X, E) when is_list(Args) ->
+ {ERight, ER} = elixir_exp:expand(Right, E),
+ {ELeft, EL} = elixir_exp_clauses:match(fun elixir_exp:expand/2, Left, E),
+ {{'<-', Meta, [ELeft, ERight]}, elixir_env:mergev(EL, ER)};
+expand({'<<>>', Meta, Args} = X, E) when is_list(Args) ->
case elixir_utils:split_last(Args) of
- { LeftStart, {'<-', OpMeta, [LeftEnd, Right] } } ->
- { ERight, ER } = elixir_exp:expand(Right, E),
- Left = { '<<>>', Meta, LeftStart ++ [LeftEnd] },
- { ELeft, EL } = elixir_exp_clauses:match(fun elixir_exp:expand/2, Left, E),
- { { '<<>>', [], [ { '<-', OpMeta, [ELeft, ERight] }] }, elixir_env:mergev(EL, ER) };
+ {LeftStart, {'<-', OpMeta, [LeftEnd, Right]}} ->
+ {ERight, ER} = elixir_exp:expand(Right, E),
+ Left = {'<<>>', Meta, LeftStart ++ [LeftEnd]},
+ {ELeft, EL} = elixir_exp_clauses:match(fun elixir_exp:expand/2, Left, E),
+ {{'<<>>', [], [{'<-', OpMeta, [ELeft, ERight]}]}, elixir_env:mergev(EL, ER)};
_ ->
elixir_exp:expand(X, E)
end;
@@ -51,40 +51,40 @@ expand(X, E) ->
translate(Meta, Args, #elixir_scope{return=Return} = RS) ->
S = RS#elixir_scope{return=true},
- { AccName, _, SA } = elixir_scope:build_var('_', S),
- { VarName, _, SV } = elixir_scope:build_var('_', SA),
+ {AccName, _, SA} = elixir_scope:build_var('_', S),
+ {VarName, _, SV} = elixir_scope:build_var('_', SA),
Line = ?line(Meta),
- Acc = { var, Line, AccName },
- Var = { var, Line, VarName },
+ Acc = {var, Line, AccName},
+ Var = {var, Line, VarName},
- { Cases, [{do,Expr}|Opts] } = elixir_utils:split_last(Args),
+ {Cases, [{do,Expr}|Opts]} = elixir_utils:split_last(Args),
- { TInto, SI } =
+ {TInto, SI} =
case lists:keyfind(into, 1, Opts) of
- { into, Into } -> elixir_translator:translate(Into, SV);
- false when Return -> { { nil, Line }, SV };
- false -> { false, SV }
+ {into, Into} -> elixir_translator:translate(Into, SV);
+ false when Return -> {{nil, Line}, SV};
+ false -> {false, SV}
end,
- { TCases, SC } = translate_gen(Meta, Cases, [], SI),
- { TExpr, SE } = elixir_translator:translate_block(Expr, Return, SC),
+ {TCases, SC} = translate_gen(Meta, Cases, [], SI),
+ {TExpr, SE} = elixir_translator:translate_block(Expr, Return, SC),
SF = elixir_scope:mergef(SI, SE),
case comprehension_expr(TInto, TExpr) of
- { inline, TIntoExpr } ->
- { build_inline(Line, TCases, TIntoExpr, TInto, Var, Acc, SE), SF };
- { into, TIntoExpr } ->
+ {inline, TIntoExpr} ->
+ {build_inline(Line, TCases, TIntoExpr, TInto, Var, Acc, SE), SF};
+ {into, TIntoExpr} ->
build_into(Line, TCases, TIntoExpr, TInto, Var, Acc, SF)
end.
-translate_gen(ForMeta, [{ '<-', Meta, [Left, Right] }|T], Acc, S) ->
- { TLeft, TRight, TFilters, TT, TS } = translate_gen(Meta, Left, Right, T, S),
- TAcc = [{ enum, Meta, TLeft, TRight, TFilters }|Acc],
+translate_gen(ForMeta, [{'<-', Meta, [Left, Right]}|T], Acc, S) ->
+ {TLeft, TRight, TFilters, TT, TS} = translate_gen(Meta, Left, Right, T, S),
+ TAcc = [{enum, Meta, TLeft, TRight, TFilters}|Acc],
translate_gen(ForMeta, TT, TAcc, TS);
-translate_gen(ForMeta, [{ '<<>>', _, [ { '<-', Meta, [Left, Right] } ] }|T], Acc, S) ->
- { TLeft, TRight, TFilters, TT, TS } = translate_gen(Meta, Left, Right, T, S),
- TAcc = [{ bin, Meta, TLeft, TRight, TFilters }|Acc],
+translate_gen(ForMeta, [{'<<>>', _, [{'<-', Meta, [Left, Right]}]}|T], Acc, S) ->
+ {TLeft, TRight, TFilters, TT, TS} = translate_gen(Meta, Left, Right, T, S),
+ TAcc = [{bin, Meta, TLeft, TRight, TFilters}|Acc],
case elixir_bitstring:has_size(TLeft) of
true -> translate_gen(ForMeta, TT, TAcc, TS);
false ->
@@ -92,50 +92,50 @@ translate_gen(ForMeta, [{ '<<>>', _, [ { '<-', Meta, [Left, Right] } ] }|T], Acc
"bitstring fields without size are not allowed in bitstring generators")
end;
translate_gen(_ForMeta, [], Acc, S) ->
- { lists:reverse(Acc), S };
+ {lists:reverse(Acc), S};
translate_gen(ForMeta, _, _, S) ->
elixir_errors:compile_error(ForMeta, S#elixir_scope.file,
"for comprehensions must start with a generator").
translate_gen(_Meta, Left, Right, T, S) ->
- { TRight, SR } = elixir_translator:translate(Right, S),
- { TLeft, SL } = elixir_clauses:match(fun elixir_translator:translate/2, Left, SR),
- { TT, { TFilters, TS } } = translate_filters(T, SL),
- { TLeft, TRight, TFilters, TT, TS }.
+ {TRight, SR} = elixir_translator:translate(Right, S),
+ {TLeft, SL} = elixir_clauses:match(fun elixir_translator:translate/2, Left, SR),
+ {TT, {TFilters, TS}} = translate_filters(T, SL),
+ {TLeft, TRight, TFilters, TT, TS}.
translate_filters(T, S) ->
- { Filters, Rest } = collect_filters(T, []),
- { Rest, lists:mapfoldr(fun translate_filter/2, S, Filters) }.
+ {Filters, Rest} = collect_filters(T, []),
+ {Rest, lists:mapfoldr(fun translate_filter/2, S, Filters)}.
translate_filter(Filter, S) ->
- { TFilter, TS } = elixir_translator:translate(Filter, S),
+ {TFilter, TS} = elixir_translator:translate(Filter, S),
case elixir_utils:returns_boolean(TFilter) of
true ->
- { { nil, TFilter }, TS };
+ {{nil, TFilter}, TS};
false ->
- { Name, _, VS } = elixir_scope:build_var('_', TS),
- { { { var, 0, Name }, TFilter }, VS }
+ {Name, _, VS} = elixir_scope:build_var('_', TS),
+ {{{var, 0, Name}, TFilter}, VS}
end.
-collect_filters([{ '<-', _, [_, _] }|_] = T, Acc) ->
- { Acc, T };
-collect_filters([{ '<<>>', _, [{ '<-', _, [_, _] }] }|_] = T, Acc) ->
- { Acc, T };
+collect_filters([{'<-', _, [_, _]}|_] = T, Acc) ->
+ {Acc, T};
+collect_filters([{'<<>>', _, [{'<-', _, [_, _]}]}|_] = T, Acc) ->
+ {Acc, T};
collect_filters([H|T], Acc) ->
collect_filters(T, [H|Acc]);
collect_filters([], Acc) ->
- { Acc, [] }.
+ {Acc, []}.
%% If all we have is one enum generator, we check if it is a list
%% for optimization otherwise fallback to the reduce generator.
-build_inline(Line, [{ enum, Meta, Left, Right, Filters }] = Orig, Expr, Into, Var, Acc, S) ->
+build_inline(Line, [{enum, Meta, Left, Right, Filters}] = Orig, Expr, Into, Var, Acc, S) ->
case Right of
- { cons, _, _, _ } ->
+ {cons, _, _, _} ->
build_comprehension(Line, Orig, Expr, Into);
- { Other, _, _ } when Other == tuple; Other == map; Other == integer; Other == atom ->
+ {Other, _, _} when Other == tuple; Other == map; Other == integer; Other == atom ->
build_reduce(Orig, Expr, Into, Acc, S);
_ ->
- Clauses = [{ enum, Meta, Left, Var, Filters }],
+ Clauses = [{enum, Meta, Left, Var, Filters}],
{'case', -1, Right, [
{clause, -1,
@@ -156,16 +156,16 @@ build_inline(Line, Clauses, Expr, Into, _Var, Acc, S) ->
end.
build_into(Line, Clauses, Expr, Into, Fun, Acc, S) ->
- { Kind, SK } = build_var(Line, S),
- { Reason, SR } = build_var(Line, SK),
- { Stack, ST } = build_var(Line, SR),
- { Done, SD } = build_var(Line, ST),
+ {Kind, SK} = build_var(Line, S),
+ {Reason, SR} = build_var(Line, SK),
+ {Stack, ST} = build_var(Line, SR),
+ {Done, SD} = build_var(Line, ST),
IntoExpr = {call, Line, Fun, [Acc, pair(Line, cont, Expr)]},
MatchExpr = {match, Line,
{tuple, Line, [Acc, Fun]},
?wrap_call(Line, 'Elixir.Collectable', into, [Into])
- },
+ },
TryExpr =
{'try', Line,
@@ -182,7 +182,7 @@ build_into(Line, Clauses, Expr, Into, Fun, Acc, S) ->
?wrap_call(Line, erlang, raise, [Kind, Reason, Stack])]}],
[]},
- { {block, Line, [MatchExpr, TryExpr]}, SD }.
+ {{block, Line, [MatchExpr, TryExpr]}, SD}.
%% Helpers
@@ -197,7 +197,7 @@ build_reduce(Clauses, Expr, {bin, _, _} = Into, Acc, S) ->
BinExpr = {bin, Line, [{bin_element, Line, Acc, default, [bitstring]}|Elements]},
build_reduce_clause(Clauses, BinExpr, Into, Acc, S).
-build_reduce_clause([{ enum, Meta, Left, Right, Filters }|T], Expr, Arg, Acc, S) ->
+build_reduce_clause([{enum, Meta, Left, Right, Filters}|T], Expr, Arg, Acc, S) ->
Line = ?line(Meta),
Inner = build_reduce_clause(T, Expr, Acc, Acc, S),
@@ -224,10 +224,10 @@ build_reduce_clause([{ enum, Meta, Left, Right, Filters }|T], Expr, Arg, Acc, S)
%% Use -1 because in case of no returns we don't care about the result
?wrap_call(-1, erlang, element, [{integer, Line, 2}, Tuple]);
-build_reduce_clause([{ bin, Meta, Left, Right, Filters }|T], Expr, Arg, Acc, S) ->
+build_reduce_clause([{bin, Meta, Left, Right, Filters}|T], Expr, Arg, Acc, S) ->
Line = ?line(Meta),
- { Tail, ST } = build_var(Line, S),
- { Fun, SF } = build_var(Line, ST),
+ {Tail, ST} = build_var(Line, S),
+ {Fun, SF} = build_var(Line, ST),
True = build_reduce_clause(T, Expr, Acc, Acc, SF),
False = Acc,
@@ -235,9 +235,9 @@ build_reduce_clause([{ bin, Meta, Left, Right, Filters }|T], Expr, Arg, Acc, S)
{bin, _, Elements} = Left,
BinMatch =
- {bin, Line, Elements ++ [{ bin_element, Line, Tail, default, [bitstring] }]},
+ {bin, Line, Elements ++ [{bin_element, Line, Tail, default, [bitstring]}]},
NoVarMatch =
- {bin, Line, no_var(Elements) ++ [{ bin_element, Line, Tail, default, [bitstring] }]},
+ {bin, Line, no_var(Elements) ++ [{bin_element, Line, Tail, default, [bitstring]}]},
Clauses =
[{clause, Line,
@@ -267,13 +267,13 @@ pair(Line, Atom, Arg) ->
{tuple, Line, [{atom, Line, Atom}, Arg]}.
build_var(Line, S) ->
- { Name, _, ST } = elixir_scope:build_var('_', S),
- { { var, Line, Name }, ST }.
+ {Name, _, ST} = elixir_scope:build_var('_', S),
+ {{var, Line, Name}, ST}.
no_var(Elements) ->
[{bin_element, Line, no_var_expr(Expr), Size, Types} ||
{bin_element, Line, Expr, Size, Types} <- Elements].
-no_var_expr({ var, Line, _ }) ->
+no_var_expr({var, Line, _}) ->
{var, Line, '_'}.
build_comprehension(Line, Clauses, Expr, false) ->
@@ -284,7 +284,7 @@ build_comprehension(Line, Clauses, Expr, false) ->
build_comprehension(Line, Clauses, Expr, Into) ->
{comprehension_kind(Into), Line, Expr, comprehension_clause(Clauses)}.
-comprehension_clause([{ Kind, Meta, Left, Right, Filters }|T]) ->
+comprehension_clause([{Kind, Meta, Left, Right, Filters}|T]) ->
Line = ?line(Meta),
[{comprehension_generator(Kind), Line, Left, Right}] ++
comprehension_filter(Line, Filters) ++
@@ -292,26 +292,26 @@ comprehension_clause([{ Kind, Meta, Left, Right, Filters }|T]) ->
comprehension_clause([]) ->
[].
-comprehension_kind({ nil, _ }) -> lc;
-comprehension_kind({ bin, _, [] }) -> bc.
+comprehension_kind({nil, _}) -> lc;
+comprehension_kind({bin, _, []}) -> bc.
comprehension_generator(enum) -> generate;
comprehension_generator(bin) -> b_generate.
-comprehension_expr({ bin, _, [] }, { bin, _, _ } = Expr) ->
- { inline, Expr };
-comprehension_expr({ bin, Line, [] }, Expr) ->
- BinExpr = { bin, Line, [{ bin_element, Line, Expr, default, [bitstring] }] },
- { inline, BinExpr };
-comprehension_expr({ nil, _ }, Expr) ->
- { inline, Expr };
+comprehension_expr({bin, _, []}, {bin, _, _} = Expr) ->
+ {inline, Expr};
+comprehension_expr({bin, Line, []}, Expr) ->
+ BinExpr = {bin, Line, [{bin_element, Line, Expr, default, [bitstring]}]},
+ {inline, BinExpr};
+comprehension_expr({nil, _}, Expr) ->
+ {inline, Expr};
comprehension_expr(false, Expr) ->
- { inline, Expr };
+ {inline, Expr};
comprehension_expr(_, Expr) ->
- { into, Expr }.
+ {into, Expr}.
comprehension_filter(Line, Filters) ->
- [join_filter(Line, Filter, { atom, Line, true }, { atom, Line, false }) ||
+ [join_filter(Line, Filter, {atom, Line, true}, {atom, Line, false}) ||
Filter <- lists:reverse(Filters)].
join_filters(_Line, [], True, _False) ->
@@ -321,18 +321,18 @@ join_filters(Line, [H|T], True, False) ->
join_filter(Line, Filter, Acc, False)
end, join_filter(Line, H, True, False), T).
-join_filter(Line, { nil, Filter }, True, False) ->
+join_filter(Line, {nil, Filter}, True, False) ->
{'case', Line, Filter, [
- {clause, Line, [{atom, Line, true}], [], [True] },
- {clause, Line, [{atom, Line, false}], [], [False] }
- ] };
-join_filter(Line, { Var, Filter }, True, False) ->
+ {clause, Line, [{atom, Line, true}], [], [True]},
+ {clause, Line, [{atom, Line, false}], [], [False]}
+ ]};
+join_filter(Line, {Var, Filter}, True, False) ->
Guard =
{op, Line, 'orelse',
{op, Line, '==', Var, {atom, Line, false}},
{op, Line, '==', Var, {atom, Line, nil}}},
{'case', Line, Filter, [
- {clause, Line, [Var], [[Guard]], [False] },
- {clause, Line, [{var, Line, '_'}], [], [True] }
- ] }.
+ {clause, Line, [Var], [[Guard]], [False]},
+ {clause, Line, [{var, Line, '_'}], [], [True]}
+ ]}.
diff --git a/lib/elixir/src/elixir_import.erl b/lib/elixir/src/elixir_import.erl
index b6a123385..17bbc6022 100644
--- a/lib/elixir/src/elixir_import.erl
+++ b/lib/elixir/src/elixir_import.erl
@@ -10,18 +10,18 @@
import(Meta, Ref, Opts, E) ->
Res =
case keyfind(only, Opts) of
- { only, functions } ->
- { import_functions(Meta, Ref, Opts, E),
- E#elixir_env.macros };
- { only, macros } ->
- { E#elixir_env.functions,
- import_macros(true, Meta, Ref, Opts, E) };
- { only, List } when is_list(List) ->
- { import_functions(Meta, Ref, Opts, E),
- import_macros(false, Meta, Ref, Opts, E) };
+ {only, functions} ->
+ {import_functions(Meta, Ref, Opts, E),
+ E#elixir_env.macros};
+ {only, macros} ->
+ {E#elixir_env.functions,
+ import_macros(true, Meta, Ref, Opts, E)};
+ {only, List} when is_list(List) ->
+ {import_functions(Meta, Ref, Opts, E),
+ import_macros(false, Meta, Ref, Opts, E)};
false ->
- { import_functions(Meta, Ref, Opts, E),
- import_macros(false, Meta, Ref, Opts, E) }
+ {import_functions(Meta, Ref, Opts, E),
+ import_macros(false, Meta, Ref, Opts, E)}
end,
record_warn(Meta, Ref, Opts, E),
@@ -41,8 +41,8 @@ import_macros(Force, Meta, Ref, Opts, E) ->
record_warn(Meta, Ref, Opts, E) ->
Warn =
case keyfind(warn, Opts) of
- { warn, false } -> false;
- { warn, true } -> true;
+ {warn, false} -> false;
+ {warn, true} -> true;
false -> not lists:keymember(context, 1, Meta)
end,
elixir_lexical:record_import(Ref, ?line(Meta), Warn, E#elixir_env.lexical_tracker).
@@ -51,10 +51,10 @@ record_warn(Meta, Ref, Opts, E) ->
calculate(Meta, Key, Opts, Old, E, Existing) ->
New = case keyfind(only, Opts) of
- { only, Only } when is_list(Only) ->
+ {only, Only} when is_list(Only) ->
case Only -- get_exports(Key) of
[{Name,Arity}|_] ->
- Tuple = { invalid_import, { Key, Name, Arity } },
+ Tuple = {invalid_import, {Key, Name, Arity}},
elixir_errors:form_error(Meta, E#elixir_env.file, ?MODULE, Tuple);
_ ->
intersection(Only, Existing())
@@ -62,8 +62,8 @@ calculate(Meta, Key, Opts, Old, E, Existing) ->
_ ->
case keyfind(except, Opts) of
false -> remove_underscored(Existing());
- { except, [] } -> remove_underscored(Existing());
- { except, Except } when is_list(Except) ->
+ {except, []} -> remove_underscored(Existing());
+ {except, Except} when is_list(Except) ->
case keyfind(Key, Old) of
false -> remove_underscored(Existing()) -- Except;
{Key,OldImports} -> OldImports -- Except
@@ -79,7 +79,7 @@ calculate(Meta, Key, Opts, Old, E, Existing) ->
[] -> keydelete(Key, Old);
_ ->
ensure_no_special_form_conflict(Meta, E#elixir_env.file, Key, Final),
- [{ Key, Final }|keydelete(Key, Old)]
+ [{Key, Final}|keydelete(Key, Old)]
end.
%% Retrieve functions and macros from modules
@@ -103,19 +103,19 @@ get_macros(Meta, Module, E) ->
Module:'__info__'(macros)
catch
error:undef ->
- Tuple = { no_macros, Module },
+ Tuple = {no_macros, Module},
elixir_errors:form_error(Meta, E#elixir_env.file, ?MODULE, Tuple)
end.
get_optional_macros(Module) ->
case code:ensure_loaded(Module) of
- { module, Module } ->
+ {module, Module} ->
try
Module:'__info__'(macros)
catch
error:undef -> []
end;
- { error, _ } -> []
+ {error, _} -> []
end.
%% VALIDATION HELPERS
@@ -123,7 +123,7 @@ get_optional_macros(Module) ->
ensure_no_special_form_conflict(Meta, File, Key, [{Name,Arity}|T]) ->
case special_form(Name, Arity) of
true ->
- Tuple = { special_form_conflict, { Key, Name, Arity } },
+ Tuple = {special_form_conflict, {Key, Name, Arity}},
elixir_errors:form_error(Meta, File, ?MODULE, Tuple);
false ->
ensure_no_special_form_conflict(Meta, File, Key, T)
@@ -141,7 +141,7 @@ format_error({special_form_conflict,{Receiver, Name, Arity}}) ->
io_lib:format("cannot import ~ts.~ts/~B because it conflicts with Elixir special forms",
[elixir_aliases:inspect(Receiver), Name, Arity]);
-format_error({ no_macros, Module }) ->
+format_error({no_macros, Module}) ->
io_lib:format("could not load macros from module ~ts", [elixir_aliases:inspect(Module)]).
%% LIST HELPERS
@@ -163,7 +163,7 @@ intersection([], _All) -> [].
%% Internal funs that are never imported etc.
remove_underscored(List) ->
- lists:filter(fun({ Name, _ }) ->
+ lists:filter(fun({Name, _}) ->
case atom_to_list(Name) of
"_" ++ _ -> false;
_ -> true
@@ -171,8 +171,8 @@ remove_underscored(List) ->
end, List).
remove_internals(Set) ->
- ordsets:del_element({ module_info, 1 },
- ordsets:del_element({ module_info, 0 }, Set)).
+ ordsets:del_element({module_info, 1},
+ ordsets:del_element({module_info, 0}, Set)).
%% Special forms
diff --git a/lib/elixir/src/elixir_interpolation.erl b/lib/elixir/src/elixir_interpolation.erl
index c329442a1..281baa1d5 100644
--- a/lib/elixir/src/elixir_interpolation.erl
+++ b/lib/elixir/src/elixir_interpolation.erl
@@ -17,13 +17,13 @@ extract(Line, _Scope, _Interpol, [], Buffer, 0, Output, []) ->
finish_extraction(Line, Buffer, Output, []);
extract(Line, _Scope, _Interpol, [], _Buffer, 0, _Output, Last) ->
- { error, { string, Line, io_lib:format("missing terminator: ~ts", [[Last]]), [] } };
+ {error, {string, Line, io_lib:format("missing terminator: ~ts", [[Last]]), []}};
extract(Line, _Scope, _Interpol, [Last|Remaining], Buffer, 0, Output, Last) ->
finish_extraction(Line, Buffer, Output, Remaining);
extract(Line, _Scope, _Interpol, [], _Buffer, _Search, _Output, Last) ->
- { error, { string, Line, io_lib:format("missing terminator: ~ts", [[Last]]), [] } };
+ {error, {string, Line, io_lib:format("missing terminator: ~ts", [[Last]]), []}};
%% Going through the string
@@ -46,13 +46,13 @@ extract(Line, Scope, true, [$#, ${|Rest], Buffer, Search, Output, Last) ->
Output1 = build_string(Line, Buffer, Output),
case elixir_tokenizer:tokenize(Rest, Line, Scope) of
- { error, { EndLine, _, "}" }, [$}|NewRest], Tokens } ->
+ {error, {EndLine, _, "}"}, [$}|NewRest], Tokens} ->
Output2 = build_interpol(Line, Tokens, Output1),
extract(EndLine, Scope, true, NewRest, [], Search, Output2, Last);
- { error, Reason, _, _ } ->
- { error, Reason };
- { ok, _EndLine, _ } ->
- { error, { string, Line, "missing interpolation terminator: }", [] } }
+ {error, Reason, _, _} ->
+ {error, Reason};
+ {ok, _EndLine, _} ->
+ {error, {string, Line, "missing interpolation terminator: }", []}}
end;
%% Matching () [] {} <> inside sigils
@@ -173,11 +173,11 @@ finish_extraction(Line, Buffer, Output, Remaining) ->
[] -> Final = [<<>>];
Final -> []
end,
- { Line, lists:reverse(Final), Remaining }.
+ {Line, lists:reverse(Final), Remaining}.
build_string(_Line, [], Output) -> Output;
build_string(_Line, Buffer, Output) ->
[elixir_utils:characters_to_binary(lists:reverse(Buffer))|Output].
build_interpol(Line, Buffer, Output) ->
- [{ Line, lists:reverse(Buffer) }|Output].
+ [{Line, lists:reverse(Buffer)}|Output].
diff --git a/lib/elixir/src/elixir_lexical.erl b/lib/elixir/src/elixir_lexical.erl
index 81e27bd67..7f299442d 100644
--- a/lib/elixir/src/elixir_lexical.erl
+++ b/lib/elixir/src/elixir_lexical.erl
@@ -11,7 +11,7 @@
run(File, Callback) ->
case code:is_loaded(?tracker) of
- { file, _ } ->
+ {file, _} ->
Pid = ?tracker:start_link(),
try
Callback(Pid)
@@ -65,13 +65,13 @@ if_tracker(Pid, Callback) when is_pid(Pid) -> Callback(Pid).
warn_unused_imports(File, Pid) ->
[ begin
- elixir_errors:handle_file_warning(File, { L, ?MODULE, { unused_import, M } })
- end || { M, L } <- ?tracker:collect_unused_imports(Pid)].
+ elixir_errors:handle_file_warning(File, {L, ?MODULE, {unused_import, M}})
+ end || {M, L} <- ?tracker:collect_unused_imports(Pid)].
warn_unused_aliases(File, Pid) ->
[ begin
- elixir_errors:handle_file_warning(File, { L, ?MODULE, { unused_alias, M } })
- end || { M, L } <- ?tracker:collect_unused_aliases(Pid)].
+ elixir_errors:handle_file_warning(File, {L, ?MODULE, {unused_alias, M}})
+ end || {M, L} <- ?tracker:collect_unused_aliases(Pid)].
format_error({unused_alias, Module}) ->
io_lib:format("unused alias ~ts", [elixir_aliases:inspect(Module)]);
diff --git a/lib/elixir/src/elixir_locals.erl b/lib/elixir/src/elixir_locals.erl
index 0298d82b5..7832cc62e 100644
--- a/lib/elixir/src/elixir_locals.erl
+++ b/lib/elixir/src/elixir_locals.erl
@@ -13,9 +13,9 @@
-define(tracker, 'Elixir.Module.LocalsTracker').
macro_for(Module, Name, Arity) ->
- Tuple = { Name, Arity },
+ Tuple = {Name, Arity},
try elixir_def:lookup_definition(Module, Tuple) of
- { { Tuple, Kind, Line, _, _, _, _ }, [_|_] = Clauses }
+ {{Tuple, Kind, Line, _, _, _, _}, [_|_] = Clauses}
when Kind == defmacro; Kind == defmacrop ->
fun() -> get_function(Line, Module, Clauses) end;
_ ->
@@ -27,9 +27,9 @@ macro_for(Module, Name, Arity) ->
local_for(Module, Name, Arity) ->
local_for(Module, Name, Arity, nil).
local_for(Module, Name, Arity, Given) ->
- Tuple = { Name, Arity },
+ Tuple = {Name, Arity},
case elixir_def:lookup_definition(Module, Tuple) of
- { { Tuple, Kind, Line, _, _, _, _ }, [_|_] = Clauses }
+ {{Tuple, Kind, Line, _, _, _, _}, [_|_] = Clauses}
when Given == nil; Kind == Given ->
get_function(Line, Module, Clauses);
_ ->
@@ -39,27 +39,27 @@ local_for(Module, Name, Arity, Given) ->
get_function(Line, Module, Clauses) ->
RewrittenClauses = [rewrite_clause(Clause, Module) || Clause <- Clauses],
- Fun = { 'fun', Line, { clauses, RewrittenClauses } },
- { value, Result, _Binding } = erl_eval:exprs([Fun], []),
+ Fun = {'fun', Line, {clauses, RewrittenClauses}},
+ {value, Result, _Binding} = erl_eval:exprs([Fun], []),
Result.
-rewrite_clause({ call, Line, { atom, Line, RawName }, Args }, Module) ->
- Remote = { remote, Line,
- { atom, Line, ?MODULE },
- { atom, Line, local_for }
- },
+rewrite_clause({call, Line, {atom, Line, RawName}, Args}, Module) ->
+ Remote = {remote, Line,
+ {atom, Line, ?MODULE},
+ {atom, Line, local_for}
+ },
%% If we have a macro, its arity in the table is
%% actually one less than in the function call
- { Name, Arity } = case atom_to_list(RawName) of
- "MACRO-" ++ Rest -> { list_to_atom(Rest), length(Args) - 1 };
- _ -> { RawName, length(Args) }
+ {Name, Arity} = case atom_to_list(RawName) of
+ "MACRO-" ++ Rest -> {list_to_atom(Rest), length(Args) - 1};
+ _ -> {RawName, length(Args)}
end,
- FunCall = { call, Line, Remote, [
- { atom, Line, Module }, { atom, Line, Name }, { integer, Line, Arity }
- ] },
- { call, Line, FunCall, Args };
+ FunCall = {call, Line, Remote, [
+ {atom, Line, Module}, {atom, Line, Name}, {integer, Line, Arity}
+ ]},
+ {call, Line, FunCall, Args};
rewrite_clause(Tuple, Module) when is_tuple(Tuple) ->
list_to_tuple(rewrite_clause(tuple_to_list(Tuple), Module));
@@ -73,7 +73,7 @@ rewrite_clause(Else, _) -> Else.
setup(Module) ->
case code:is_loaded(?tracker) of
- { file, _ } -> ets:insert(Module, { ?attr, ?tracker:start_link() });
+ {file, _} -> ets:insert(Module, {?attr, ?tracker:start_link()});
false -> ok
end.
@@ -128,10 +128,10 @@ cache_env(#elixir_env{module=Module} = RE) ->
E = RE#elixir_env{line=nil,vars=[]},
try ets:lookup_element(Module, ?attr, 2) of
Pid ->
- { Pid, ?tracker:cache_env(Pid, E) }
+ {Pid, ?tracker:cache_env(Pid, E)}
catch
error:badarg ->
- { Escaped, _ } = elixir_quote:escape(E, false),
+ {Escaped, _} = elixir_quote:escape(E, false),
Escaped
end;
cache_env(ExEnv) ->
@@ -145,21 +145,21 @@ get_cached_env(Env) -> Env.
ensure_no_function_conflict(Meta, File, Module, AllDefined) ->
if_tracker(Module, fun(Pid) ->
[ begin
- elixir_errors:form_error(Meta, File, ?MODULE, { function_conflict, Error })
+ elixir_errors:form_error(Meta, File, ?MODULE, {function_conflict, Error})
end || Error <- ?tracker:collect_imports_conflicts(Pid, AllDefined) ]
end),
ok.
warn_unused_local(File, Module, Private) ->
if_tracker(Module, fun(Pid) ->
- Args = [ { Fun, Kind, Defaults } ||
- { Fun, Kind, _Line, true, Defaults } <- Private],
+ Args = [ {Fun, Kind, Defaults} ||
+ {Fun, Kind, _Line, true, Defaults} <- Private],
Unused = ?tracker:collect_unused_locals(Pid, Args),
[ begin
- { _, _, Line, _, _ } = lists:keyfind(element(2, Error), 1, Private),
- elixir_errors:handle_file_warning(File, { Line, ?MODULE, Error })
+ {_, _, Line, _, _} = lists:keyfind(element(2, Error), 1, Private),
+ elixir_errors:handle_file_warning(File, {Line, ?MODULE, Error})
end || Error <- Unused ]
end).
diff --git a/lib/elixir/src/elixir_map.erl b/lib/elixir/src/elixir_map.erl
index 5acb3a03f..c690b04b4 100644
--- a/lib/elixir/src/elixir_map.erl
+++ b/lib/elixir/src/elixir_map.erl
@@ -3,15 +3,15 @@
-import(elixir_errors, [compile_error/4]).
-include("elixir.hrl").
-expand_map(Meta, [{ '|', UpdateMeta, [Left, Right]}], E) ->
- { [ELeft|ERight], EA } = elixir_exp:expand_args([Left|Right], E),
- { { '%{}', Meta, [{ '|', UpdateMeta, [ELeft, ERight] }] }, EA };
+expand_map(Meta, [{'|', UpdateMeta, [Left, Right]}], E) ->
+ {[ELeft|ERight], EA} = elixir_exp:expand_args([Left|Right], E),
+ {{'%{}', Meta, [{'|', UpdateMeta, [ELeft, ERight]}]}, EA};
expand_map(Meta, Args, E) ->
- { EArgs, EA } = elixir_exp:expand_args(Args, E),
- { { '%{}', Meta, EArgs }, EA }.
+ {EArgs, EA} = elixir_exp:expand_args(Args, E),
+ {{'%{}', Meta, EArgs}, EA}.
expand_struct(Meta, Left, Right, E) ->
- { [ELeft, ERight], EE } = elixir_exp:expand_args([Left, Right], E),
+ {[ELeft, ERight], EE} = elixir_exp:expand_args([Left, Right], E),
case is_atom(ELeft) of
true -> ok;
@@ -31,27 +31,27 @@ expand_struct(Meta, Left, Right, E) ->
"the struct fields are not yet accessible",
[elixir_aliases:inspect(ELeft)]);
false ->
- [{ struct, context }|Meta]
+ [{struct, context}|Meta]
end;
false ->
Meta
end,
case ERight of
- { '%{}', _, _ } -> ok;
+ {'%{}', _, _} -> ok;
_ -> compile_error(Meta, E#elixir_env.file,
"expected struct to be followed by a map, got: ~ts",
['Elixir.Macro':to_string(ERight)])
end,
- { { '%', EMeta, [ELeft, ERight] }, EE }.
+ {{'%', EMeta, [ELeft, ERight]}, EE}.
translate_map(Meta, Args, S) ->
- { Assocs, TUpdate, US } = extract_assoc_update(Args, S),
+ {Assocs, TUpdate, US} = extract_assoc_update(Args, S),
translate_map(Meta, Assocs, TUpdate, US).
-translate_struct(Meta, Name, { '%{}', MapMeta, Args }, S) ->
- { Assocs, TUpdate, US } = extract_assoc_update(Args, S),
+translate_struct(Meta, Name, {'%{}', MapMeta, Args}, S) ->
+ {Assocs, TUpdate, US} = extract_assoc_update(Args, S),
Struct = load_struct(Meta, Name, S),
case is_map(Struct) of
@@ -65,25 +65,25 @@ translate_struct(Meta, Name, { '%{}', MapMeta, Args }, S) ->
if
TUpdate /= nil ->
Line = ?line(Meta),
- { VarName, _, VS } = elixir_scope:build_var('_', US),
+ {VarName, _, VS} = elixir_scope:build_var('_', US),
- Var = { var, Line, VarName },
- Map = { map, Line, [{ map_field_exact, Line, { atom, Line, '__struct__' }, { atom, Line, Name }}] },
+ Var = {var, Line, VarName},
+ Map = {map, Line, [{map_field_exact, Line, {atom, Line, '__struct__'}, {atom, Line, Name}}]},
- Match = { match, Line, Var, Map },
- Error = { tuple, Line, [{ atom, Line, badstruct }, { atom, Line, Name }, Var] },
+ Match = {match, Line, Var, Map},
+ Error = {tuple, Line, [{atom, Line, badstruct}, {atom, Line, Name}, Var]},
- { TMap, TS } = translate_map(MapMeta, Assocs, Var, VS),
+ {TMap, TS} = translate_map(MapMeta, Assocs, Var, VS),
- { { 'case', Line, TUpdate, [
- { clause, Line, [Match], [], [TMap] },
- { clause, Line, [Var], [], [?wrap_call(Line, erlang, error, [Error])] }
- ] }, TS };
+ {{'case', Line, TUpdate, [
+ {clause, Line, [Match], [], [TMap]},
+ {clause, Line, [Var], [], [?wrap_call(Line, erlang, error, [Error])]}
+ ]}, TS};
S#elixir_scope.context == match ->
translate_map(MapMeta, Assocs ++ [{'__struct__', Name}], nil, US);
true ->
Keys = [K || {K,_} <- Assocs],
- { StructAssocs, _ } = elixir_quote:escape(maps:to_list(maps:without(Keys, Struct)), false),
+ {StructAssocs, _} = elixir_quote:escape(maps:to_list(maps:without(Keys, Struct)), false),
translate_map(MapMeta, StructAssocs ++ Assocs ++ [{'__struct__', Name}], nil, US)
end.
@@ -93,7 +93,7 @@ load_struct(Meta, Name, S) ->
Local =
elixir_module:is_open(Name) andalso
(case lists:keyfind(struct, 1, Meta) of
- { struct, context } -> true;
+ {struct, context} -> true;
_ -> wait_for_struct(Name)
end),
@@ -122,26 +122,26 @@ wait_for_struct(Module) ->
false;
Pid ->
Ref = erlang:make_ref(),
- Pid ! { waiting, struct, self(), Ref, Module },
+ Pid ! {waiting, struct, self(), Ref, Module},
receive
- { Ref, ready } ->
+ {Ref, ready} ->
true;
- { Ref, release } ->
+ {Ref, release} ->
'Elixir.Kernel.ErrorHandler':release(),
false
end
end.
translate_map(Meta, Assocs, TUpdate, #elixir_scope{extra=Extra} = S) ->
- { Op, KeyFun, ValFun } = extract_key_val_op(TUpdate, S),
+ {Op, KeyFun, ValFun} = extract_key_val_op(TUpdate, S),
Line = ?line(Meta),
- { TArgs, SA } = lists:mapfoldl(fun
- ({ Key, Value }, Acc) ->
- { TKey, Acc1 } = KeyFun(Key, Acc),
- { TValue, Acc2 } = ValFun(Value, Acc1#elixir_scope{extra=Extra}),
- { { Op, ?line(Meta), TKey, TValue }, Acc2 };
+ {TArgs, SA} = lists:mapfoldl(fun
+ ({Key, Value}, Acc) ->
+ {TKey, Acc1} = KeyFun(Key, Acc),
+ {TValue, Acc2} = ValFun(Value, Acc1#elixir_scope{extra=Extra}),
+ {{Op, ?line(Meta), TKey, TValue}, Acc2};
(Other, _Acc) ->
compile_error(Meta, S#elixir_scope.file, "expected key-value pairs in map, got: ~ts",
['Elixir.Macro':to_string(Other)])
@@ -149,27 +149,27 @@ translate_map(Meta, Assocs, TUpdate, #elixir_scope{extra=Extra} = S) ->
build_map(Line, TUpdate, TArgs, SA).
-extract_assoc_update([{ '|', _Meta, [Update, Args] }], S) ->
- { TArg, SA } = elixir_translator:translate_arg(Update, S, S),
- { Args, TArg, SA };
-extract_assoc_update(Args, SA) -> { Args, nil, SA }.
+extract_assoc_update([{'|', _Meta, [Update, Args]}], S) ->
+ {TArg, SA} = elixir_translator:translate_arg(Update, S, S),
+ {Args, TArg, SA};
+extract_assoc_update(Args, SA) -> {Args, nil, SA}.
extract_key_val_op(_TUpdate, #elixir_scope{context=match}) ->
- { map_field_exact,
+ {map_field_exact,
fun(X, Acc) -> elixir_translator:translate(X, Acc#elixir_scope{extra=map_key}) end,
- fun elixir_translator:translate/2 };
+ fun elixir_translator:translate/2};
extract_key_val_op(TUpdate, S) ->
KS = #elixir_scope{extra=map_key},
Op = if TUpdate == nil -> map_field_assoc; true -> map_field_exact end,
- { Op,
+ {Op,
fun(X, Acc) -> elixir_translator:translate_arg(X, Acc, KS) end,
- fun(X, Acc) -> elixir_translator:translate_arg(X, Acc, S) end }.
+ fun(X, Acc) -> elixir_translator:translate_arg(X, Acc, S) end}.
-build_map(Line, nil, TArgs, SA) -> { { map, Line, TArgs }, SA };
-build_map(Line, TUpdate, TArgs, SA) -> { { map, Line, TUpdate, TArgs }, SA }.
+build_map(Line, nil, TArgs, SA) -> {{map, Line, TArgs}, SA};
+build_map(Line, TUpdate, TArgs, SA) -> {{map, Line, TUpdate, TArgs}, SA}.
assert_struct_keys(Meta, Name, Struct, Assocs, S) ->
[begin
compile_error(Meta, S#elixir_scope.file, "unknown key ~ts for struct ~ts",
['Elixir.Kernel':inspect(Key), elixir_aliases:inspect(Name)])
- end || { Key, _ } <- Assocs, not maps:is_key(Key, Struct)].
+ end || {Key, _} <- Assocs, not maps:is_key(Key, Struct)].
diff --git a/lib/elixir/src/elixir_module.erl b/lib/elixir/src/elixir_module.erl
index 575f4167f..2fb0ecbe0 100644
--- a/lib/elixir/src/elixir_module.erl
+++ b/lib/elixir/src/elixir_module.erl
@@ -41,7 +41,7 @@ compile(Module, Block, Vars, #elixir_env{line=Line} = Env) when is_atom(Module)
end;
compile(Module, _Block, _Vars, #elixir_env{line=Line,file=File}) ->
- elixir_errors:form_error(Line, File, ?MODULE, { invalid_module, Module });
+ elixir_errors:form_error(Line, File, ?MODULE, {invalid_module, Module});
compile(Module, Block, Vars, ExEnv) ->
compile(Module, Block, Vars, elixir_env:ex_to_env(ExEnv)).
@@ -52,10 +52,10 @@ do_compile(Line, Module, Block, Vars, E) ->
build(Line, File, Module, E#elixir_env.lexical_tracker),
try
- { Result, NE } = eval_form(Line, Module, Block, Vars, E),
- { Base, Export, Private, Def, Defmacro, Functions } = elixir_def:unwrap_definitions(Module),
+ {Result, NE} = eval_form(Line, Module, Block, Vars, E),
+ {Base, Export, Private, Def, Defmacro, Functions} = elixir_def:unwrap_definitions(Module),
- { All, Forms0 } = functions_form(Line, File, Module, Base, Export, Def, Defmacro, Functions),
+ {All, Forms0} = functions_form(Line, File, Module, Base, Export, Def, Defmacro, Functions),
Forms1 = specs_form(Module, Private, Defmacro, Forms0),
Forms2 = attributes_form(Line, File, Module, Forms1),
Forms3 = typedocs_form(Module, Forms2),
@@ -66,21 +66,21 @@ do_compile(Line, Module, Block, Vars, E) ->
[elixir_locals:record_local(Tuple, Module) || Tuple <- OnLoad]
end,
- AllFunctions = Def ++ [T || { T, defp, _, _, _ } <- Private],
+ AllFunctions = Def ++ [T || {T, defp, _, _, _} <- Private],
elixir_locals:ensure_no_function_conflict(Line, File, Module, AllFunctions),
elixir_locals:warn_unused_local(File, Module, Private),
warn_invalid_clauses(Line, File, Module, All),
warn_unused_docs(Line, File, Module),
- Location = { elixir_utils:relative_to_cwd(elixir_utils:characters_to_list(File)), Line },
+ Location = {elixir_utils:relative_to_cwd(elixir_utils:characters_to_list(File)), Line},
Final = [
- { attribute, Line, file, Location },
- { attribute, Line, module, Module } | Forms3
+ {attribute, Line, file, Location},
+ {attribute, Line, module, Module} | Forms3
],
Binary = load_form(Line, Final, compile_opts(Module), NE),
- { module, Module, Binary, Result }
+ {module, Module, Binary, Result}
after
elixir_locals:cleanup(Module),
elixir_def:cleanup(Module),
@@ -95,25 +95,25 @@ build(Line, File, Module, Lexical) ->
DataTable = data_table(Module),
case ets:info(DataTable, name) == DataTable of
- true -> elixir_errors:form_error(Line, File, ?MODULE, { module_in_definition, Module });
+ true -> elixir_errors:form_error(Line, File, ?MODULE, {module_in_definition, Module});
false -> []
end,
ets:new(DataTable, [set, named_table, public]),
- ets:insert(DataTable, { before_compile, [] }),
- ets:insert(DataTable, { after_compile, [] }),
+ ets:insert(DataTable, {before_compile, []}),
+ ets:insert(DataTable, {after_compile, []}),
case elixir_compiler:get_opt(docs) of
- true -> ets:insert(DataTable, { on_definition, [{ 'Elixir.Module', compile_doc }] });
- _ -> ets:insert(DataTable, { on_definition, [] })
+ true -> ets:insert(DataTable, {on_definition, [{'Elixir.Module', compile_doc}]});
+ _ -> ets:insert(DataTable, {on_definition, []})
end,
Attributes = [behaviour, on_load, spec, type, export_type, opaque, callback, compile],
- ets:insert(DataTable, { ?acc_attr, [before_compile, after_compile, on_definition|Attributes] }),
- ets:insert(DataTable, { ?persisted_attr, [vsn|Attributes] }),
- ets:insert(DataTable, { ?docs_attr, ets:new(DataTable, [ordered_set, public]) }),
- ets:insert(DataTable, { ?lexical_attr, Lexical }),
- ets:insert(DataTable, { ?overridable_attr, [] }),
+ ets:insert(DataTable, {?acc_attr, [before_compile, after_compile, on_definition|Attributes]}),
+ ets:insert(DataTable, {?persisted_attr, [vsn|Attributes]}),
+ ets:insert(DataTable, {?docs_attr, ets:new(DataTable, [ordered_set, public])}),
+ ets:insert(DataTable, {?lexical_attr, Lexical}),
+ ets:insert(DataTable, {?overridable_attr, []}),
%% Setup other modules
elixir_def:setup(Module),
@@ -122,24 +122,24 @@ build(Line, File, Module, Lexical) ->
%% Receives the module representation and evaluates it.
eval_form(Line, Module, Block, Vars, E) ->
- { Value, EE } = elixir_compiler:eval_forms(Block, Vars, E),
+ {Value, EE} = elixir_compiler:eval_forms(Block, Vars, E),
elixir_def_overridable:store_pending(Module),
- EC = eval_callbacks(Line, Module, before_compile, [elixir_env:env_to_ex({ Line, EE })], EE),
+ EC = eval_callbacks(Line, Module, before_compile, [elixir_env:env_to_ex({Line, EE})], EE),
elixir_def_overridable:store_pending(Module),
- { Value, EC }.
+ {Value, EC}.
%% Return the form with exports and function declarations.
functions_form(Line, File, Module, BaseAll, BaseExport, Def, Defmacro, BaseFunctions) ->
Info = add_info_function(Line, File, Module, BaseExport, Def, Defmacro),
- All = [{ '__info__', 1 }|BaseAll],
- Export = [{ '__info__', 1 }|BaseExport],
+ All = [{'__info__', 1}|BaseAll],
+ Export = [{'__info__', 1}|BaseExport],
Functions = [Info|BaseFunctions],
- { All, [
- { attribute, Line, export, lists:sort(Export) } | Functions
- ] }.
+ {All, [
+ {attribute, Line, export, lists:sort(Export)} | Functions
+ ]}.
%% Add attributes handling to the form
@@ -149,7 +149,7 @@ attributes_form(Line, _File, Module, Current) ->
AccAttrs = ets:lookup_element(Table, '__acc_attributes', 2),
PersistedAttrs = ets:lookup_element(Table, '__persisted_attributes', 2),
- Transform = fun({ Key, Value }, Acc) ->
+ Transform = fun({Key, Value}, Acc) ->
case lists:member(Key, PersistedAttrs) of
false -> Acc;
true ->
@@ -157,7 +157,7 @@ attributes_form(Line, _File, Module, Current) ->
true -> Value;
false -> [Value]
end,
- lists:foldl(fun(X, Final) -> [{ attribute, Line, Key, X }|Final] end, Acc, Attrs)
+ lists:foldl(fun(X, Final) -> [{attribute, Line, Key, X}|Final] end, Acc, Attrs)
end
end,
@@ -166,10 +166,10 @@ attributes_form(Line, _File, Module, Current) ->
%% Add typedocs to the form
typedocs_form(Module, Current) ->
Table = docs_table(Module),
- Transform = fun({ Tuple, Line, Kind, _Sig, Doc }, Acc) ->
+ Transform = fun({Tuple, Line, Kind, _Sig, Doc}, Acc) ->
case Kind of
- type -> [{ attribute, Line, typedoc, { Tuple, Doc } } | Acc];
- opaque -> [{ attribute, Line, typedoc, { Tuple, Doc } } | Acc];
+ type -> [{attribute, Line, typedoc, {Tuple, Doc}} | Acc];
+ opaque -> [{attribute, Line, typedoc, {Tuple, Doc}} | Acc];
_ -> Acc
end
end,
@@ -178,9 +178,9 @@ typedocs_form(Module, Current) ->
%% Specs
specs_form(Module, Private, Defmacro, Forms) ->
- Defmacrop = [Tuple || { Tuple, defmacrop, _, _, _ } <- Private],
+ Defmacrop = [Tuple || {Tuple, defmacrop, _, _, _} <- Private],
case code:ensure_loaded('Elixir.Kernel.Typespec') of
- { module, 'Elixir.Kernel.Typespec' } ->
+ {module, 'Elixir.Kernel.Typespec'} ->
Callbacks = 'Elixir.Module':get_attribute(Module, callback),
Specs = [translate_spec(Spec, Defmacro, Defmacrop) ||
Spec <- 'Elixir.Module':get_attribute(Module, spec)],
@@ -190,36 +190,36 @@ specs_form(Module, Private, Defmacro, Forms) ->
Temp = specs_attributes(spec, Forms, Specs),
specs_attributes(callback, Temp, Callbacks);
- { error, _ } ->
+ {error, _} ->
Forms
end.
specs_attributes(Type, Forms, Specs) ->
- Keys = lists:foldl(fun({ Tuple, Value }, Acc) ->
- lists:keystore(Tuple, 1, Acc, { Tuple, Value })
+ Keys = lists:foldl(fun({Tuple, Value}, Acc) ->
+ lists:keystore(Tuple, 1, Acc, {Tuple, Value})
end, [], Specs),
- lists:foldl(fun({ Tuple, _ }, Acc) ->
- Values = [V || { K, V } <- Specs, K == Tuple],
- { type, Line, _, _ } = hd(Values),
- [{ attribute, Line, Type, { Tuple, Values } }|Acc]
+ lists:foldl(fun({Tuple, _}, Acc) ->
+ Values = [V || {K, V} <- Specs, K == Tuple],
+ {type, Line, _, _} = hd(Values),
+ [{attribute, Line, Type, {Tuple, Values}}|Acc]
end, Forms, Keys).
-translate_spec({ Spec, Rest }, Defmacro, Defmacrop) ->
+translate_spec({Spec, Rest}, Defmacro, Defmacrop) ->
case ordsets:is_element(Spec, Defmacrop) of
- true -> { Spec, Rest };
+ true -> {Spec, Rest};
false ->
case ordsets:is_element(Spec, Defmacro) of
true ->
- { Name, Arity } = Spec,
- { { ?elixir_macro(Name), Arity + 1 }, spec_for_macro(Rest) };
+ {Name, Arity} = Spec,
+ {{?elixir_macro(Name), Arity + 1}, spec_for_macro(Rest)};
false ->
- { Spec, Rest }
+ {Spec, Rest}
end
end.
-spec_for_macro({ type, Line, 'fun', [{ type, _, product, Args }|T] }) ->
+spec_for_macro({type, Line, 'fun', [{type, _, product, Args}|T]}) ->
NewArgs = [{type,Line,term,[]}|Args],
- { type, Line, 'fun', [{ type, Line, product, NewArgs }|T] };
+ {type, Line, 'fun', [{type, Line, product, NewArgs}|T]};
spec_for_macro(Else) -> Else.
@@ -233,7 +233,7 @@ compile_opts(Module) ->
load_form(Line, Forms, Opts, #elixir_env{file=File} = E) ->
elixir_compiler:module(Forms, File, Opts, fun(Module, Binary) ->
- Env = elixir_env:env_to_ex({ Line, E }),
+ Env = elixir_env:env_to_ex({Line, E}),
eval_callbacks(Line, Module, after_compile, [Env, Binary], E),
case get(elixir_compiled) of
@@ -244,8 +244,8 @@ load_form(Line, Forms, Opts, #elixir_env{file=File} = E) ->
undefined -> [];
PID ->
Ref = make_ref(),
- PID ! { module_available, self(), Ref, File, Module, Binary },
- receive { Ref, ack } -> ok end
+ PID ! {module_available, self(), Ref, File, Module, Binary},
+ receive {Ref, ack} -> ok end
end;
_ ->
[]
@@ -259,16 +259,16 @@ check_module_availability(Line, File, Module) ->
'Elixir.PID', 'Elixir.Reference', 'Elixir.Any'],
case lists:member(Module, Reserved) of
- true -> elixir_errors:handle_file_error(File, { Line, ?MODULE, { module_reserved, Module } });
+ true -> elixir_errors:handle_file_error(File, {Line, ?MODULE, {module_reserved, Module}});
false -> ok
end,
case elixir_compiler:get_opt(ignore_module_conflict) of
false ->
case code:ensure_loaded(Module) of
- { module, _ } ->
- elixir_errors:handle_file_warning(File, { Line, ?MODULE, { module_defined, Module } });
- { error, _ } ->
+ {module, _} ->
+ elixir_errors:handle_file_warning(File, {Line, ?MODULE, {module_defined, Module}});
+ {error, _} ->
ok
end;
true ->
@@ -278,12 +278,12 @@ check_module_availability(Line, File, Module) ->
warn_invalid_clauses(_Line, _File, 'Elixir.Kernel.SpecialForms', _All) -> ok;
warn_invalid_clauses(_Line, File, Module, All) ->
ets:foldl(fun
- ({ _, _, Kind, _, _ }, _) when Kind == type; Kind == opaque ->
+ ({_, _, Kind, _, _}, _) when Kind == type; Kind == opaque ->
ok;
- ({ Tuple, Line, _, _, _ }, _) ->
+ ({Tuple, Line, _, _, _}, _) ->
case lists:member(Tuple, All) of
false ->
- elixir_errors:handle_file_warning(File, { Line, ?MODULE, { invalid_clause, Tuple } });
+ elixir_errors:handle_file_warning(File, {Line, ?MODULE, {invalid_clause, Tuple}});
true ->
ok
end
@@ -293,7 +293,7 @@ warn_unused_docs(Line, File, Module) ->
lists:foreach(fun(Attribute) ->
case ets:member(data_table(Module), Attribute) of
true ->
- elixir_errors:handle_file_warning(File, { Line, ?MODULE, { unused_doc, Attribute } });
+ elixir_errors:handle_file_warning(File, {Line, ?MODULE, {unused_doc, Attribute}});
_ ->
ok
end
@@ -302,51 +302,51 @@ warn_unused_docs(Line, File, Module) ->
% EXTRA FUNCTIONS
add_info_function(Line, File, Module, Export, Def, Defmacro) ->
- Pair = { '__info__', 1 },
+ Pair = {'__info__', 1},
case lists:member(Pair, Export) of
true ->
elixir_errors:form_error(Line, File, ?MODULE, {internal_function_overridden, Pair});
false ->
Docs = elixir_compiler:get_opt(docs),
- { function, 0, '__info__', 1, [
+ {function, 0, '__info__', 1, [
functions_clause(Def),
macros_clause(Defmacro),
docs_clause(Module, Docs),
moduledoc_clause(Line, Module, Docs),
module_clause(Module),
else_clause()
- ] }
+ ]}
end.
functions_clause(Def) ->
- { clause, 0, [{ atom, 0, functions }], [], [elixir_utils:elixir_to_erl(Def)] }.
+ {clause, 0, [{atom, 0, functions}], [], [elixir_utils:elixir_to_erl(Def)]}.
macros_clause(Defmacro) ->
- { clause, 0, [{ atom, 0, macros }], [], [elixir_utils:elixir_to_erl(Defmacro)] }.
+ {clause, 0, [{atom, 0, macros}], [], [elixir_utils:elixir_to_erl(Defmacro)]}.
module_clause(Module) ->
- { clause, 0, [{ atom, 0, module }], [], [{ atom, 0, Module }] }.
+ {clause, 0, [{atom, 0, module}], [], [{atom, 0, Module}]}.
docs_clause(Module, true) ->
Docs = ordsets:from_list(
[{Tuple, Line, Kind, Sig, Doc} ||
{Tuple, Line, Kind, Sig, Doc} <- ets:tab2list(docs_table(Module)),
Kind =/= type, Kind =/= opaque]),
- { clause, 0, [{ atom, 0, docs }], [], [elixir_utils:elixir_to_erl(Docs)] };
+ {clause, 0, [{atom, 0, docs}], [], [elixir_utils:elixir_to_erl(Docs)]};
docs_clause(_Module, _) ->
- { clause, 0, [{ atom, 0, docs }], [], [{ atom, 0, nil }] }.
+ {clause, 0, [{atom, 0, docs}], [], [{atom, 0, nil}]}.
moduledoc_clause(Line, Module, true) ->
Docs = 'Elixir.Module':get_attribute(Module, moduledoc),
- { clause, 0, [{ atom, 0, moduledoc }], [], [elixir_utils:elixir_to_erl({ Line, Docs })] };
+ {clause, 0, [{atom, 0, moduledoc}], [], [elixir_utils:elixir_to_erl({Line, Docs})]};
moduledoc_clause(_Line, _Module, _) ->
- { clause, 0, [{ atom, 0, moduledoc }], [], [{ atom, 0, nil }] }.
+ {clause, 0, [{atom, 0, moduledoc}], [], [{atom, 0, nil}]}.
else_clause() ->
- Info = { call, 0, { atom, 0, module_info }, [{ var, 0, atom }] },
- { clause, 0, [{ var, 0, atom }], [], [Info] }.
+ Info = {call, 0, {atom, 0, module_info}, [{var, 0, atom}]},
+ {clause, 0, [{var, 0, atom}], [], [Info]}.
% HELPERS
@@ -355,9 +355,9 @@ eval_callbacks(Line, Module, Name, Args, E) ->
Meta = [{line,Line},{require,false}],
lists:foldl(fun({M,F}, Acc) ->
- { Expr, ET } = elixir_dispatch:dispatch_require(Meta, M, F, Args, Acc, fun(AM, AF, AA) ->
+ {Expr, ET} = elixir_dispatch:dispatch_require(Meta, M, F, Args, Acc, fun(AM, AF, AA) ->
apply(AM, AF, AA),
- { nil, Acc }
+ {nil, Acc}
end),
if
@@ -365,23 +365,23 @@ eval_callbacks(Line, Module, Name, Args, E) ->
ET;
true ->
try
- { _Value, _Binding, EE, _S } = elixir:eval_forms(Expr, [], ET),
+ {_Value, _Binding, EE, _S} = elixir:eval_forms(Expr, [], ET),
EE
catch
Kind:Reason ->
- Info = { M, F, length(Args), location(Line, E) },
+ Info = {M, F, length(Args), location(Line, E)},
erlang:raise(Kind, Reason, prune_stacktrace(Info, erlang:get_stacktrace()))
end
end
end, E, Callbacks).
location(Line, E) ->
- [{ file, elixir_utils:characters_to_list(E#elixir_env.file) }, { line, Line }].
+ [{file, elixir_utils:characters_to_list(E#elixir_env.file)}, {line, Line}].
%% We've reached the elixir_module or eval internals, skip it with the rest
-prune_stacktrace(Info, [{ elixir, eval_forms, _, _ }|_]) ->
+prune_stacktrace(Info, [{elixir, eval_forms, _, _}|_]) ->
[Info];
-prune_stacktrace(Info, [{ elixir_module, _, _, _ }|_]) ->
+prune_stacktrace(Info, [{elixir_module, _, _, _}|_]) ->
[Info];
prune_stacktrace(Info, [H|T]) ->
[H|prune_stacktrace(Info, T)];
@@ -390,20 +390,20 @@ prune_stacktrace(Info, []) ->
% ERROR HANDLING
-format_error({ invalid_clause, { Name, Arity } }) ->
+format_error({invalid_clause, {Name, Arity}}) ->
io_lib:format("empty clause provided for nonexistent function or macro ~ts/~B", [Name, Arity]);
-format_error({ unused_doc, typedoc }) ->
+format_error({unused_doc, typedoc}) ->
"@typedoc provided but no type follows it";
-format_error({ unused_doc, doc }) ->
+format_error({unused_doc, doc}) ->
"@doc provided but no definition follows it";
-format_error({ internal_function_overridden, { Name, Arity } }) ->
+format_error({internal_function_overridden, {Name, Arity}}) ->
io_lib:format("function ~ts/~B is internal and should not be overridden", [Name, Arity]);
-format_error({ invalid_module, Module}) ->
+format_error({invalid_module, Module}) ->
io_lib:format("invalid module name: ~p", [Module]);
-format_error({ module_defined, Module }) ->
+format_error({module_defined, Module}) ->
io_lib:format("redefining module ~ts", [elixir_aliases:inspect(Module)]);
-format_error({ module_reserved, Module }) ->
+format_error({module_reserved, Module}) ->
io_lib:format("module ~ts is reserved and cannot be defined", [elixir_aliases:inspect(Module)]);
-format_error({ module_in_definition, Module }) ->
+format_error({module_in_definition, Module}) ->
io_lib:format("cannot define module ~ts because it is currently being defined",
[elixir_aliases:inspect(Module)]).
diff --git a/lib/elixir/src/elixir_parser.yrl b/lib/elixir/src/elixir_parser.yrl
index 62e5607e4..ff05f97c4 100644
--- a/lib/elixir/src/elixir_parser.yrl
+++ b/lib/elixir/src/elixir_parser.yrl
@@ -41,7 +41,7 @@ Expect 2.
%% Changes in ops and precedence should be reflected on lib/elixir/lib/macro.ex
%% Note though the operator => in practice has lower precedence than all others.
-%% Its entry in the table is only to support the %{ user | foo => bar } syntax.
+%% Its entry in the table is only to support the %{user | foo => bar} syntax.
Left 5 do.
Right 10 stab_op_eol. %% ->
Left 20 ','.
@@ -130,53 +130,53 @@ block_expr -> parens_call call_args_parens call_args_parens do_block : build_nes
block_expr -> dot_do_identifier do_block : build_identifier('$1', '$2').
block_expr -> dot_identifier call_args_no_parens_all do_block : build_identifier('$1', '$2' ++ '$3').
-op_expr -> match_op_eol expr : { '$1', '$2' }.
-op_expr -> add_op_eol expr : { '$1', '$2' }.
-op_expr -> mult_op_eol expr : { '$1', '$2' }.
-op_expr -> exp_op_eol expr : { '$1', '$2' }.
-op_expr -> two_op_eol expr : { '$1', '$2' }.
-op_expr -> and_op_eol expr : { '$1', '$2' }.
-op_expr -> or_op_eol expr : { '$1', '$2' }.
-op_expr -> in_op_eol expr : { '$1', '$2' }.
-op_expr -> in_match_op_eol expr : { '$1', '$2' }.
-op_expr -> type_op_eol expr : { '$1', '$2' }.
-op_expr -> when_op_eol expr : { '$1', '$2' }.
-op_expr -> pipe_op_eol expr : { '$1', '$2' }.
-op_expr -> comp_op_eol expr : { '$1', '$2' }.
-op_expr -> arrow_op_eol expr : { '$1', '$2' }.
-
-no_parens_op_expr -> match_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> add_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> mult_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> exp_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> two_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> and_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> or_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> in_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> in_match_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> type_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> pipe_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> comp_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> arrow_op_eol no_parens_expr : { '$1', '$2' }.
+op_expr -> match_op_eol expr : {'$1', '$2'}.
+op_expr -> add_op_eol expr : {'$1', '$2'}.
+op_expr -> mult_op_eol expr : {'$1', '$2'}.
+op_expr -> exp_op_eol expr : {'$1', '$2'}.
+op_expr -> two_op_eol expr : {'$1', '$2'}.
+op_expr -> and_op_eol expr : {'$1', '$2'}.
+op_expr -> or_op_eol expr : {'$1', '$2'}.
+op_expr -> in_op_eol expr : {'$1', '$2'}.
+op_expr -> in_match_op_eol expr : {'$1', '$2'}.
+op_expr -> type_op_eol expr : {'$1', '$2'}.
+op_expr -> when_op_eol expr : {'$1', '$2'}.
+op_expr -> pipe_op_eol expr : {'$1', '$2'}.
+op_expr -> comp_op_eol expr : {'$1', '$2'}.
+op_expr -> arrow_op_eol expr : {'$1', '$2'}.
+
+no_parens_op_expr -> match_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> add_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> mult_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> exp_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> two_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> and_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> or_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> in_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> in_match_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> type_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> pipe_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> comp_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> arrow_op_eol no_parens_expr : {'$1', '$2'}.
%% Allow when (and only when) with keywords
-no_parens_op_expr -> when_op_eol no_parens_expr : { '$1', '$2' }.
-no_parens_op_expr -> when_op_eol call_args_no_parens_kw : { '$1', '$2' }.
-
-matched_op_expr -> match_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> add_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> mult_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> exp_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> two_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> and_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> or_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> in_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> in_match_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> type_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> when_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> pipe_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> comp_op_eol matched_expr : { '$1', '$2' }.
-matched_op_expr -> arrow_op_eol matched_expr : { '$1', '$2' }.
+no_parens_op_expr -> when_op_eol no_parens_expr : {'$1', '$2'}.
+no_parens_op_expr -> when_op_eol call_args_no_parens_kw : {'$1', '$2'}.
+
+matched_op_expr -> match_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> add_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> mult_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> exp_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> two_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> and_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> or_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> in_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> in_match_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> type_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> when_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> pipe_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> comp_op_eol matched_expr : {'$1', '$2'}.
+matched_op_expr -> arrow_op_eol matched_expr : {'$1', '$2'}.
no_parens_one_expr -> dot_op_identifier call_args_no_parens_one : build_identifier('$1', '$2').
no_parens_one_expr -> dot_identifier call_args_no_parens_one : build_identifier('$1', '$2').
@@ -193,7 +193,7 @@ access_expr -> unary_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> fn_eol stab end_eol : build_fn('$1', build_stab(reverse('$2'))).
access_expr -> open_paren stab close_paren : build_stab(reverse('$2')).
access_expr -> number : ?exprs('$1').
-access_expr -> signed_number : { element(4, '$1'), meta('$1'), ?exprs('$1') }.
+access_expr -> signed_number : {element(4, '$1'), meta('$1'), ?exprs('$1')}.
access_expr -> list : element(1, '$1').
access_expr -> map : '$1'.
access_expr -> tuple : '$1'.
@@ -224,9 +224,9 @@ bracket_at_expr -> at_op_eol access_expr list :
%% Blocks
do_block -> do_eol 'end' : [[{do,nil}]].
-do_block -> do_eol stab end_eol : [[{ do, build_stab(reverse('$2')) }]].
-do_block -> do_eol block_list 'end' : [[{ do, nil }|'$2']].
-do_block -> do_eol stab_eol block_list 'end' : [[{ do, build_stab(reverse('$2')) }|'$3']].
+do_block -> do_eol stab end_eol : [[{do, build_stab(reverse('$2'))}]].
+do_block -> do_eol block_list 'end' : [[{do, nil}|'$2']].
+do_block -> do_eol stab_eol block_list 'end' : [[{do, build_stab(reverse('$2'))}|'$3']].
fn_eol -> 'fn' : '$1'.
fn_eol -> 'fn' eol : '$1'.
@@ -253,13 +253,13 @@ stab_expr -> call_args_no_parens_all stab_op_eol stab_maybe_expr :
stab_expr -> stab_parens_many stab_op_eol stab_maybe_expr :
build_op('$2', unwrap_splice('$1'), '$3').
stab_expr -> stab_parens_many when_op expr stab_op_eol stab_maybe_expr :
- build_op('$4', [{ 'when', meta('$2'), unwrap_splice('$1') ++ ['$3'] }], '$5').
+ build_op('$4', [{'when', meta('$2'), unwrap_splice('$1') ++ ['$3']}], '$5').
stab_maybe_expr -> 'expr' : '$1'.
stab_maybe_expr -> '$empty' : nil.
-block_item -> block_eol stab_eol : { ?exprs('$1'), build_stab(reverse('$2')) }.
-block_item -> block_eol : { ?exprs('$1'), nil }.
+block_item -> block_eol stab_eol : {?exprs('$1'), build_stab(reverse('$2'))}.
+block_item -> block_eol : {?exprs('$1'), nil}.
block_list -> block_item : ['$1'].
block_list -> block_item block_list : ['$1'|'$2'].
@@ -353,7 +353,7 @@ dot_op -> '.' eol : '$1'.
dot_identifier -> identifier : '$1'.
dot_identifier -> matched_expr dot_op identifier : build_dot('$2', '$1', '$3').
-dot_alias -> aliases : { '__aliases__', meta('$1', 0), ?exprs('$1') }.
+dot_alias -> aliases : {'__aliases__', meta('$1', 0), ?exprs('$1')}.
dot_alias -> matched_expr dot_op aliases : build_dot_alias('$2', '$1', '$3').
dot_op_identifier -> op_identifier : '$1'.
@@ -369,7 +369,7 @@ dot_paren_identifier -> paren_identifier : '$1'.
dot_paren_identifier -> matched_expr dot_op paren_identifier : build_dot('$2', '$1', '$3').
parens_call -> dot_paren_identifier : '$1'.
-parens_call -> matched_expr dot_call_op : { '.', meta('$2'), ['$1'] }. % Fun/local calls
+parens_call -> matched_expr dot_call_op : {'.', meta('$2'), ['$1']}. % Fun/local calls
% Function calls with no parentheses
@@ -431,13 +431,13 @@ kw_eol -> kw_identifier eol : ?exprs('$1').
kw_eol -> kw_identifier_string : build_atom_string('$1').
kw_eol -> kw_identifier_string eol : build_atom_string('$1').
-kw_base -> kw_eol container_expr : [{ '$1', '$2' }].
-kw_base -> kw_base ',' kw_eol container_expr : [{ '$3', '$4' }|'$1'].
+kw_base -> kw_eol container_expr : [{'$1', '$2'}].
+kw_base -> kw_base ',' kw_eol container_expr : [{'$3', '$4'}|'$1'].
kw -> kw_base : reverse('$1').
kw -> kw_base ',' : reverse('$1').
-call_args_no_parens_kw_expr -> kw_eol call_args_no_parens_expr : { '$1','$2' }.
+call_args_no_parens_kw_expr -> kw_eol call_args_no_parens_expr : {'$1','$2'}.
call_args_no_parens_kw -> call_args_no_parens_kw_expr : ['$1'].
call_args_no_parens_kw -> call_args_no_parens_kw_expr ',' call_args_no_parens_kw : ['$1'|'$3'].
@@ -470,15 +470,15 @@ map_expr -> at_op_eol map_expr : build_unary_op('$1', '$2').
assoc_op_eol -> assoc_op : '$1'.
assoc_op_eol -> assoc_op eol : '$1'.
-assoc_expr -> container_expr assoc_op_eol container_expr : { '$1', '$3' }.
+assoc_expr -> container_expr assoc_op_eol container_expr : {'$1', '$3'}.
assoc_expr -> map_expr : '$1'.
-assoc_update -> matched_expr pipe_op_eol matched_expr assoc_op_eol matched_expr : { '$2', '$1', [{ '$3', '$5' }] }.
-assoc_update -> unmatched_expr pipe_op_eol expr assoc_op_eol expr : { '$2', '$1', [{ '$3', '$5' }] }.
-assoc_update -> matched_expr pipe_op_eol map_expr : { '$2', '$1', ['$3'] }.
+assoc_update -> matched_expr pipe_op_eol matched_expr assoc_op_eol matched_expr : {'$2', '$1', [{'$3', '$5'}]}.
+assoc_update -> unmatched_expr pipe_op_eol expr assoc_op_eol expr : {'$2', '$1', [{'$3', '$5'}]}.
+assoc_update -> matched_expr pipe_op_eol map_expr : {'$2', '$1', ['$3']}.
-assoc_update_kw -> matched_expr pipe_op_eol kw : { '$2', '$1', '$3' }.
-assoc_update_kw -> unmatched_expr pipe_op_eol kw : { '$2', '$1', '$3' }.
+assoc_update_kw -> matched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
+assoc_update_kw -> unmatched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
assoc_base -> assoc_expr : ['$1'].
assoc_base -> assoc_base ',' assoc_expr : ['$3'|'$1'].
@@ -504,8 +504,8 @@ struct_op -> '%' : '$1'.
struct_op -> '%' eol : '$1'.
map -> map_op map_args : '$2'.
-map -> struct_op map_expr map_args : { '%', meta('$1'), ['$2', '$3'] }.
-map -> struct_op map_expr eol map_args : { '%', meta('$1'), ['$2', '$4'] }.
+map -> struct_op map_expr map_args : {'%', meta('$1'), ['$2', '$3']}.
+map -> struct_op map_expr eol map_args : {'%', meta('$1'), ['$2', '$4']}.
Erlang code.
@@ -526,58 +526,58 @@ meta(Node) -> meta(?line(Node)).
%% Operators
-build_op({ _Kind, Line, '/' }, { '&', _, [{ Kind, _, Atom } = Left] }, Right) when is_number(Right), is_atom(Atom), is_atom(Kind) ->
- { '&', meta(Line), [{ '/', meta(Line), [Left, Right] }] };
+build_op({_Kind, Line, '/'}, {'&', _, [{Kind, _, Atom} = Left]}, Right) when is_number(Right), is_atom(Atom), is_atom(Kind) ->
+ {'&', meta(Line), [{'/', meta(Line), [Left, Right]}]};
-build_op({ _Kind, Line, '/' }, { '&', _, [{ { '.', _, [_, _] }, _, [] } = Left] }, Right) when is_number(Right) ->
- { '&', meta(Line), [{ '/', meta(Line), [Left, Right] }] };
+build_op({_Kind, Line, '/'}, {'&', _, [{{'.', _, [_, _]}, _, []} = Left]}, Right) when is_number(Right) ->
+ {'&', meta(Line), [{'/', meta(Line), [Left, Right]}]};
-build_op({ _Kind, Line, 'in' }, { UOp, _, [Left] }, Right) when ?rearrange_uop(UOp) ->
- { UOp, meta(Line), [{ 'in', meta(Line), [Left, Right] }] };
+build_op({_Kind, Line, 'in'}, {UOp, _, [Left]}, Right) when ?rearrange_uop(UOp) ->
+ {UOp, meta(Line), [{'in', meta(Line), [Left, Right]}]};
-build_op({ _Kind, Line, Op }, Left, Right) ->
- { Op, meta(Line), [Left, Right] }.
+build_op({_Kind, Line, Op}, Left, Right) ->
+ {Op, meta(Line), [Left, Right]}.
-build_unary_op({ _Kind, Line, Op }, Expr) ->
- { Op, meta(Line), [Expr] }.
+build_unary_op({_Kind, Line, Op}, Expr) ->
+ {Op, meta(Line), [Expr]}.
build_list(Marker, Args) ->
- { Args, ?line(Marker) }.
+ {Args, ?line(Marker)}.
build_tuple(_Marker, [Left, Right]) ->
- { Left, Right };
+ {Left, Right};
build_tuple(Marker, Args) ->
- { '{}', meta(Marker), Args }.
+ {'{}', meta(Marker), Args}.
build_bit(Marker, Args) ->
- { '<<>>', meta(Marker), Args }.
+ {'<<>>', meta(Marker), Args}.
build_map(Marker, Args) ->
- { '%{}', meta(Marker), Args }.
+ {'%{}', meta(Marker), Args}.
-build_map_update(Marker, { Pipe, Left, Right }, Extra) ->
- { '%{}', meta(Marker), [build_op(Pipe, Left, Right ++ Extra)] }.
+build_map_update(Marker, {Pipe, Left, Right}, Extra) ->
+ {'%{}', meta(Marker), [build_op(Pipe, Left, Right ++ Extra)]}.
%% Blocks
-build_block([{Op,_,[_]}]=Exprs) when ?rearrange_uop(Op) -> { '__block__', [], Exprs };
+build_block([{Op,_,[_]}]=Exprs) when ?rearrange_uop(Op) -> {'__block__', [], Exprs};
build_block([{unquote_splicing,_,Args}]=Exprs) when
- length(Args) =< 2 -> { '__block__', [], Exprs };
+ length(Args) =< 2 -> {'__block__', [], Exprs};
build_block([Expr]) -> Expr;
-build_block(Exprs) -> { '__block__', [], Exprs }.
+build_block(Exprs) -> {'__block__', [], Exprs}.
%% Dots
-build_dot_alias(Dot, { '__aliases__', _, Left }, { 'aliases', _, Right }) ->
- { '__aliases__', meta(Dot), Left ++ Right };
+build_dot_alias(Dot, {'__aliases__', _, Left}, {'aliases', _, Right}) ->
+ {'__aliases__', meta(Dot), Left ++ Right};
-build_dot_alias(Dot, Other, { 'aliases', _, Right }) ->
- { '__aliases__', meta(Dot), [Other|Right] }.
+build_dot_alias(Dot, Other, {'aliases', _, Right}) ->
+ {'__aliases__', meta(Dot), [Other|Right]}.
build_dot(Dot, Left, Right) ->
- { '.', meta(Dot), [Left, extract_identifier(Right)] }.
+ {'.', meta(Dot), [Left, extract_identifier(Right)]}.
-extract_identifier({ Kind, _, Identifier }) when
+extract_identifier({Kind, _, Identifier}) when
Kind == identifier; Kind == bracket_identifier; Kind == paren_identifier;
Kind == do_identifier; Kind == op_identifier ->
Identifier.
@@ -587,57 +587,57 @@ extract_identifier({ Kind, _, Identifier }) when
build_nested_parens(Dot, Args1, Args2) ->
Identifier = build_identifier(Dot, Args1),
Meta = element(2, Identifier),
- { Identifier, Meta, Args2 }.
+ {Identifier, Meta, Args2}.
-build_identifier({ '.', Meta, _ } = Dot, Args) ->
+build_identifier({'.', Meta, _} = Dot, Args) ->
FArgs = case Args of
nil -> [];
_ -> Args
end,
- { Dot, Meta, FArgs };
+ {Dot, Meta, FArgs};
-build_identifier({ Keyword, Line }, Args) when Keyword == fn ->
- { fn, meta(Line), Args };
+build_identifier({Keyword, Line}, Args) when Keyword == fn ->
+ {fn, meta(Line), Args};
-build_identifier({ op_identifier, Line, Identifier }, [Arg]) ->
- { Identifier, [{ambiguous_op,nil}|meta(Line)], [Arg] };
+build_identifier({op_identifier, Line, Identifier}, [Arg]) ->
+ {Identifier, [{ambiguous_op,nil}|meta(Line)], [Arg]};
-build_identifier({ _, Line, Identifier }, Args) ->
- { Identifier, meta(Line), Args }.
+build_identifier({_, Line, Identifier}, Args) ->
+ {Identifier, meta(Line), Args}.
%% Fn
build_fn(Op, Stab) ->
- { fn, meta(Op), Stab }.
+ {fn, meta(Op), Stab}.
%% Access
-build_access(Expr, { List, Line }) ->
+build_access(Expr, {List, Line}) ->
Meta = meta(Line),
- { { '.', Meta, ['Elixir.Kernel', access] }, Meta, [Expr, List] }.
+ {{'.', Meta, ['Elixir.Kernel', access]}, Meta, [Expr, List]}.
%% Interpolation aware
-build_sigil({ sigil, Line, Sigil, Parts, Modifiers }) ->
+build_sigil({sigil, Line, Sigil, Parts, Modifiers}) ->
Meta = meta(Line),
- { list_to_atom("sigil_" ++ [Sigil]), Meta, [ { '<<>>', Meta, string_parts(Parts) }, Modifiers ] }.
+ {list_to_atom("sigil_" ++ [Sigil]), Meta, [ {'<<>>', Meta, string_parts(Parts)}, Modifiers ]}.
-build_bin_string({ bin_string, _Line, [H] }) when is_binary(H) ->
+build_bin_string({bin_string, _Line, [H]}) when is_binary(H) ->
H;
-build_bin_string({ bin_string, Line, Args }) ->
- { '<<>>', meta(Line), string_parts(Args) }.
+build_bin_string({bin_string, Line, Args}) ->
+ {'<<>>', meta(Line), string_parts(Args)}.
-build_list_string({ list_string, _Line, [H] }) when is_binary(H) ->
+build_list_string({list_string, _Line, [H]}) when is_binary(H) ->
elixir_utils:characters_to_list(H);
-build_list_string({ list_string, Line, Args }) ->
+build_list_string({list_string, Line, Args}) ->
Meta = meta(Line),
- { { '.', Meta, ['Elixir.List', 'from_char_data!'] }, Meta, [{ '<<>>', Meta, string_parts(Args) }] }.
+ {{'.', Meta, ['Elixir.List', 'from_char_data!']}, Meta, [{'<<>>', Meta, string_parts(Args)}]}.
-build_atom_string({ Atom, _Line, Safe, [H] }) when ?is_atom_string(Atom) andalso is_binary(H) ->
+build_atom_string({Atom, _Line, Safe, [H]}) when ?is_atom_string(Atom) andalso is_binary(H) ->
Op = binary_to_atom_op(Safe), erlang:Op(H, utf8);
-build_atom_string({ Atom, Line, Safe, Args }) when ?is_atom_string(Atom) ->
+build_atom_string({Atom, Line, Safe, Args}) when ?is_atom_string(Atom) ->
Meta = meta(Line),
- { { '.', Meta, [erlang, binary_to_atom_op(Safe)] }, Meta, [{ '<<>>', Meta, string_parts(Args) }, utf8] }.
+ {{'.', Meta, [erlang, binary_to_atom_op(Safe)]}, Meta, [{'<<>>', Meta, string_parts(Args)}, utf8]}.
binary_to_atom_op(true) -> binary_to_existing_atom;
binary_to_atom_op(false) -> binary_to_atom.
@@ -646,34 +646,34 @@ string_parts(Parts) ->
[string_part(Part) || Part <- Parts].
string_part(Binary) when is_binary(Binary) ->
Binary;
-string_part({ Line, Tokens }) ->
+string_part({Line, Tokens}) ->
Form = string_tokens_parse(Tokens),
Meta = meta(Line),
- { '::', Meta, [{ { '.', Meta, ['Elixir.Kernel', to_string] }, Meta, [Form]}, { binary, Meta, nil }]}.
+ {'::', Meta, [{{'.', Meta, ['Elixir.Kernel', to_string]}, Meta, [Form]}, {binary, Meta, nil}]}.
string_tokens_parse(Tokens) ->
case parse(Tokens) of
- { ok, Forms } -> Forms;
- { error, _ } = Error -> throw(Error)
+ {ok, Forms} -> Forms;
+ {error, _} = Error -> throw(Error)
end.
%% Keywords
-build_stab([{ '->', Meta, [Left, Right] }|T]) ->
+build_stab([{'->', Meta, [Left, Right]}|T]) ->
build_stab(Meta, T, Left, [Right], []);
build_stab(Else) ->
build_block(Else).
-build_stab(Old, [{ '->', New, [Left, Right] }|T], Marker, Temp, Acc) ->
- H = { '->', Old, [Marker, build_block(reverse(Temp))] },
+build_stab(Old, [{'->', New, [Left, Right]}|T], Marker, Temp, Acc) ->
+ H = {'->', Old, [Marker, build_block(reverse(Temp))]},
build_stab(New, T, Left, [Right], [H|Acc]);
build_stab(Meta, [H|T], Marker, Temp, Acc) ->
build_stab(Meta, T, Marker, [H|Temp], Acc);
build_stab(Meta, [], Marker, Temp, Acc) ->
- H = { '->', Meta, [Marker, build_block(reverse(Temp))] },
+ H = {'->', Meta, [Marker, build_block(reverse(Temp))]},
reverse([H|Acc]).
%% Every time the parser sees a (unquote_splicing())
@@ -683,26 +683,26 @@ build_stab(Meta, [], Marker, Temp, Acc) ->
%% case, we don't actually want the block, since it is
%% an arg style call. unwrap_splice unwraps the splice
%% from such blocks.
-unwrap_splice([{ '__block__', [], [{ unquote_splicing, _, _ }] = Splice }]) ->
+unwrap_splice([{'__block__', [], [{unquote_splicing, _, _}] = Splice}]) ->
Splice;
unwrap_splice(Other) -> Other.
unwrap_when(Args) ->
case elixir_utils:split_last(Args) of
- { Start, { 'when', Meta, [_, _] = End } } ->
- [{ 'when', Meta, Start ++ End }];
- { _, _ } ->
+ {Start, {'when', Meta, [_, _] = End}} ->
+ [{'when', Meta, Start ++ End}];
+ {_, _} ->
Args
end.
to_block([One]) when not is_list(One) -> One;
-to_block(Other) -> { '__block__', [], reverse(Other) }.
+to_block(Other) -> {'__block__', [], reverse(Other)}.
%% Errors
throw(Line, Error, Token) ->
- throw({ error, { Line, ?MODULE, [Error, Token] }}).
+ throw({error, {Line, ?MODULE, [Error, Token]}}).
throw_no_parens_strict(Token) ->
throw(?line(Token), "unexpected parenthesis. If you are making a "
@@ -712,7 +712,7 @@ throw_no_parens_strict(Token) ->
throw_no_parens_many_strict(Token) ->
Line =
case lists:keyfind(line, 1, element(2, Token)) of
- { line, L } -> L;
+ {line, L} -> L;
false -> 0
end,
diff --git a/lib/elixir/src/elixir_quote.erl b/lib/elixir/src/elixir_quote.erl
index 03b5a7ac3..a2e4939d1 100644
--- a/lib/elixir/src/elixir_quote.erl
+++ b/lib/elixir/src/elixir_quote.erl
@@ -5,7 +5,7 @@
-include("elixir.hrl").
-define(defs(Kind), Kind == def; Kind == defp; Kind == defmacro; Kind == defmacrop).
-define(lexical(Kind), Kind == import; Kind == alias; Kind == '__aliases__').
--compile({ inline, [keyfind/2, keystore/3, keydelete/2, keyreplace/3, keynew/3] }).
+-compile({inline, [keyfind/2, keystore/3, keydelete/2, keyreplace/3, keynew/3]}).
%% Apply the line from site call on quoted contents.
%% Receives a Key to look for the default line as argument.
@@ -19,18 +19,18 @@ linify(Line, Key, Exprs) when is_integer(Line) ->
linify_with_context_counter(Line, Var, Exprs) when is_integer(Line) ->
do_linify(Line, line, Var, Exprs).
-do_linify(Line, Key, { Receiver, Counter } = Var, { Left, Meta, Receiver })
+do_linify(Line, Key, {Receiver, Counter} = Var, {Left, Meta, Receiver})
when is_atom(Left), is_list(Meta), Left /= '_' ->
do_tuple_linify(Line, Key, Var, keynew(counter, Meta, Counter), Left, Receiver);
-do_linify(Line, Key, { _, Counter } = Var, { Lexical, [_|_] = Meta, [_|_] = Args }) when ?lexical(Lexical) ->
+do_linify(Line, Key, {_, Counter} = Var, {Lexical, [_|_] = Meta, [_|_] = Args}) when ?lexical(Lexical) ->
do_tuple_linify(Line, Key, Var, keynew(counter, Meta, Counter), Lexical, Args);
-do_linify(Line, Key, Var, { Left, Meta, Right }) when is_list(Meta) ->
+do_linify(Line, Key, Var, {Left, Meta, Right}) when is_list(Meta) ->
do_tuple_linify(Line, Key, Var, Meta, Left, Right);
-do_linify(Line, Key, Var, { Left, Right }) ->
- { do_linify(Line, Key, Var, Left), do_linify(Line, Key, Var, Right) };
+do_linify(Line, Key, Var, {Left, Right}) ->
+ {do_linify(Line, Key, Var, Left), do_linify(Line, Key, Var, Right)};
do_linify(Line, Key, Var, List) when is_list(List) ->
[do_linify(Line, Key, Var, X) || X <- List];
@@ -38,23 +38,23 @@ do_linify(Line, Key, Var, List) when is_list(List) ->
do_linify(_, _, _, Else) -> Else.
do_tuple_linify(Line, Key, Var, Meta, Left, Right) ->
- { do_linify(Line, Key, Var, Left),
+ {do_linify(Line, Key, Var, Left),
do_linify_meta(Line, Key, Meta),
- do_linify(Line, Key, Var, Right) }.
+ do_linify(Line, Key, Var, Right)}.
do_linify_meta(0, line, Meta) ->
Meta;
do_linify_meta(Line, line, Meta) ->
case keyfind(line, Meta) of
- { line, Int } when is_integer(Int), Int /= 0 ->
+ {line, Int} when is_integer(Int), Int /= 0 ->
Meta;
_ ->
keystore(line, Meta, Line)
end;
do_linify_meta(Line, Key, Meta) ->
case keyfind(Key, Meta) of
- { Key, Int } when is_integer(Int), Int /= 0 ->
- keyreplace(Key, Meta, { line, Int });
+ {Key, Int} when is_integer(Int), Int /= 0 ->
+ keyreplace(Key, Meta, {line, Int});
_ ->
do_linify_meta(Line, line, Meta)
end.
@@ -64,32 +64,32 @@ do_linify_meta(Line, Key, Meta) ->
dot(Meta, Left, Right, Args, Context, File) ->
annotate(dot(Meta, Left, Right, Args), Context, File).
-dot(Meta, Left, { '__aliases__', _, Args }, nil) ->
- { '__aliases__', Meta, [Left|Args] };
+dot(Meta, Left, {'__aliases__', _, Args}, nil) ->
+ {'__aliases__', Meta, [Left|Args]};
dot(Meta, Left, Right, nil) when is_atom(Right) ->
case atom_to_list(Right) of
"Elixir." ++ _ ->
- { '__aliases__', Meta, [Left, Right] };
+ {'__aliases__', Meta, [Left, Right]};
_ ->
- { { '.', Meta, [Left, Right] }, Meta, [] }
+ {{'.', Meta, [Left, Right]}, Meta, []}
end;
-dot(Meta, Left, { Right, _, Context }, nil) when is_atom(Right), is_atom(Context) ->
- { { '.', Meta, [Left, Right] }, Meta, [] };
+dot(Meta, Left, {Right, _, Context}, nil) when is_atom(Right), is_atom(Context) ->
+ {{'.', Meta, [Left, Right]}, Meta, []};
-dot(Meta, Left, { Right, _, Args }, nil) when is_atom(Right) ->
- { { '.', Meta, [Left, Right] }, Meta, Args };
+dot(Meta, Left, {Right, _, Args}, nil) when is_atom(Right) ->
+ {{'.', Meta, [Left, Right]}, Meta, Args};
dot(_Meta, _Left, Right, nil) ->
argument_error(<<"expected unquote after dot to return an atom, an alias or a quoted call, got: ",
('Elixir.Macro':to_string(Right))/binary>>);
dot(Meta, Left, Right, Args) when is_atom(Right) ->
- { { '.', Meta, [Left, Right] }, Meta, Args };
+ {{'.', Meta, [Left, Right]}, Meta, Args};
-dot(Meta, Left, { Right, _, Context }, Args) when is_atom(Right), is_atom(Context) ->
- { { '.', Meta, [Left, Right] }, Meta, Args };
+dot(Meta, Left, {Right, _, Context}, Args) when is_atom(Right), is_atom(Context) ->
+ {{'.', Meta, [Left, Right]}, Meta, Args};
dot(_Meta, _Left, Right, _Args) ->
argument_error(<<"expected unquote after dot with args to return an atom or a quoted call, got: ",
@@ -106,7 +106,7 @@ tail_list(Left, Right, Tail) when is_list(Right), is_list(Tail) ->
tail_list(Left, Right, Tail) when is_list(Left) ->
validate_list(Left),
[H|T] = lists:reverse(Tail ++ Left),
- lists:reverse([{ '|', [], [H, Right] }|T]).
+ lists:reverse([{'|', [], [H, Right]}|T]).
validate_list(List) when is_list(List) ->
ok;
@@ -119,24 +119,24 @@ argument_error(Message) ->
%% Annotates the AST with context and other info
-annotate({ Def, Meta, [{ H, M, A }|T] }, Context, File) when ?defs(Def) ->
+annotate({Def, Meta, [{H, M, A}|T]}, Context, File) when ?defs(Def) ->
%% Store the context information in the first element of the
%% definition tuple so we can access it later on.
MM = keystore(context, keystore(file, M, File), Context),
- { Def, Meta, [{ H, MM, A }|T] };
-annotate({ { '.', _, [_, Def] } = Target, Meta, [{ H, M, A }|T] }, Context, File) when ?defs(Def) ->
+ {Def, Meta, [{H, MM, A}|T]};
+annotate({{'.', _, [_, Def]} = Target, Meta, [{H, M, A}|T]}, Context, File) when ?defs(Def) ->
MM = keystore(context, keystore(file, M, File), Context),
- { Target, Meta, [{ H, MM, A }|T] };
+ {Target, Meta, [{H, MM, A}|T]};
-annotate({ Lexical, Meta, [_|_] = Args }, Context, _File) when Lexical == import; Lexical == alias ->
+annotate({Lexical, Meta, [_|_] = Args}, Context, _File) when Lexical == import; Lexical == alias ->
NewMeta = keystore(context, keydelete(counter, Meta), Context),
- { Lexical, NewMeta, Args };
+ {Lexical, NewMeta, Args};
annotate(Tree, _Context, _File) -> Tree.
%% Escapes the given expression. It is similar to quote, but
%% lines are kept and hygiene mechanisms are disabled.
escape(Expr, Unquote) ->
- { Res, Q } = quote(Expr, nil, #elixir_quote{
+ {Res, Q} = quote(Expr, nil, #elixir_quote{
line=true,
keep=false,
vars_hygiene=false,
@@ -144,8 +144,8 @@ escape(Expr, Unquote) ->
imports_hygiene=false,
unquote=Unquote,
escape=true
- }, nil),
- { Res, Q#elixir_quote.unquoted }.
+ }, nil),
+ {Res, Q#elixir_quote.unquoted}.
%% Quotes an expression and return its quoted Elixir AST.
@@ -155,56 +155,56 @@ quote(Expr, nil, Q, E) ->
quote(Expr, Binding, Q, E) ->
Context = Q#elixir_quote.context,
- Vars = [ { '{}', [],
+ Vars = [ {'{}', [],
[ '=', [], [
- { '{}', [], [K, [], Context] },
+ {'{}', [], [K, [], Context]},
V
] ]
- } || { K, V } <- Binding],
+ } || {K, V} <- Binding],
- { TExprs, TQ } = do_quote(Expr, Q, E),
- { { '{}',[], ['__block__',[], Vars ++ [TExprs] ] }, TQ }.
+ {TExprs, TQ} = do_quote(Expr, Q, E),
+ {{'{}',[], ['__block__',[], Vars ++ [TExprs] ]}, TQ}.
%% Actual quoting and helpers
-do_quote({ quote, _, Args } = Tuple, #elixir_quote{unquote=true} = Q, E) when length(Args) == 1; length(Args) == 2 ->
- { TTuple, TQ } = do_quote_tuple(Tuple, Q#elixir_quote{unquote=false}, E),
- { TTuple, TQ#elixir_quote{unquote=true} };
+do_quote({quote, _, Args} = Tuple, #elixir_quote{unquote=true} = Q, E) when length(Args) == 1; length(Args) == 2 ->
+ {TTuple, TQ} = do_quote_tuple(Tuple, Q#elixir_quote{unquote=false}, E),
+ {TTuple, TQ#elixir_quote{unquote=true}};
-do_quote({ unquote, _Meta, [Expr] }, #elixir_quote{unquote=true} = Q, _) ->
- { Expr, Q#elixir_quote{unquoted=true} };
+do_quote({unquote, _Meta, [Expr]}, #elixir_quote{unquote=true} = Q, _) ->
+ {Expr, Q#elixir_quote{unquoted=true}};
%% Aliases
-do_quote({ '__aliases__', Meta, [H|T] } = Alias, #elixir_quote{aliases_hygiene=true} = Q, E) when is_atom(H) and (H /= 'Elixir') ->
+do_quote({'__aliases__', Meta, [H|T]} = Alias, #elixir_quote{aliases_hygiene=true} = Q, E) when is_atom(H) and (H /= 'Elixir') ->
Annotation = case elixir_aliases:expand(Alias, E#elixir_env.aliases,
E#elixir_env.macro_aliases, E#elixir_env.lexical_tracker) of
Atom when is_atom(Atom) -> Atom;
Aliases when is_list(Aliases) -> false
end,
AliasMeta = keystore(alias, keydelete(counter, Meta), Annotation),
- do_quote_tuple({ '__aliases__', AliasMeta, [H|T] }, Q, E);
+ do_quote_tuple({'__aliases__', AliasMeta, [H|T]}, Q, E);
%% Vars
-do_quote({ Left, Meta, nil }, #elixir_quote{vars_hygiene=true} = Q, E) when is_atom(Left) ->
- do_quote_tuple({ Left, Meta, Q#elixir_quote.context }, Q, E);
+do_quote({Left, Meta, nil}, #elixir_quote{vars_hygiene=true} = Q, E) when is_atom(Left) ->
+ do_quote_tuple({Left, Meta, Q#elixir_quote.context}, Q, E);
%% Unquote
-do_quote({ { { '.', Meta, [Left, unquote] }, _, [Expr] }, _, Args }, #elixir_quote{unquote=true} = Q, E) ->
+do_quote({{{'.', Meta, [Left, unquote]}, _, [Expr]}, _, Args}, #elixir_quote{unquote=true} = Q, E) ->
do_quote_call(Left, Meta, Expr, Args, Q, E);
-do_quote({ { '.', Meta, [Left, unquote] }, _, [Expr] }, #elixir_quote{unquote=true} = Q, E) ->
+do_quote({{'.', Meta, [Left, unquote]}, _, [Expr]}, #elixir_quote{unquote=true} = Q, E) ->
do_quote_call(Left, Meta, Expr, nil, Q, E);
%% Imports
-do_quote({ '&', Meta, [{ '/', _, [{F, _, C}, A]}] = Args },
+do_quote({'&', Meta, [{'/', _, [{F, _, C}, A]}] = Args},
#elixir_quote{imports_hygiene=true} = Q, E) when is_atom(F), is_integer(A), is_atom(C) ->
do_quote_fa('&', Meta, Args, F, A, Q, E);
-do_quote({ Name, Meta, ArgsOrAtom }, #elixir_quote{imports_hygiene=true} = Q, E) when is_atom(Name) ->
+do_quote({Name, Meta, ArgsOrAtom}, #elixir_quote{imports_hygiene=true} = Q, E) when is_atom(Name) ->
Arity = case is_atom(ArgsOrAtom) of
true -> 0;
false -> length(ArgsOrAtom)
@@ -214,82 +214,82 @@ do_quote({ Name, Meta, ArgsOrAtom }, #elixir_quote{imports_hygiene=true} = Q, E)
elixir_dispatch:find_import(Meta, Name, Arity, E) of
false ->
case (Arity == 1) andalso keyfind(ambiguous_op, Meta) of
- { ambiguous_op, nil } -> keystore(ambiguous_op, Meta, Q#elixir_quote.context);
+ {ambiguous_op, nil} -> keystore(ambiguous_op, Meta, Q#elixir_quote.context);
_ -> Meta
end;
Receiver ->
keystore(import, keystore(context, Meta, Q#elixir_quote.context), Receiver)
end,
- Annotated = annotate({ Name, NewMeta, ArgsOrAtom }, Q#elixir_quote.context, file(E, Q)),
+ Annotated = annotate({Name, NewMeta, ArgsOrAtom}, Q#elixir_quote.context, file(E, Q)),
do_quote_tuple(Annotated, Q, E);
-do_quote({ _, _, _ } = Tuple, #elixir_quote{escape=false} = Q, E) ->
+do_quote({_, _, _} = Tuple, #elixir_quote{escape=false} = Q, E) ->
Annotated = annotate(Tuple, Q#elixir_quote.context, file(E, Q)),
do_quote_tuple(Annotated, Q, E);
%% Literals
-do_quote({ Left, Right }, #elixir_quote{unquote=true} = Q, E) when
+do_quote({Left, Right}, #elixir_quote{unquote=true} = Q, E) when
is_tuple(Left) andalso (element(1, Left) == unquote_splicing);
is_tuple(Right) andalso (element(1, Right) == unquote_splicing) ->
- do_quote({ '{}', [], [Left, Right] }, Q, E);
+ do_quote({'{}', [], [Left, Right]}, Q, E);
-do_quote({ Left, Right }, Q, E) ->
- { TLeft, LQ } = do_quote(Left, Q, E),
- { TRight, RQ } = do_quote(Right, LQ, E),
- { { TLeft, TRight }, RQ };
+do_quote({Left, Right}, Q, E) ->
+ {TLeft, LQ} = do_quote(Left, Q, E),
+ {TRight, RQ} = do_quote(Right, LQ, E),
+ {{TLeft, TRight}, RQ};
do_quote(Map, #elixir_quote{escape=true} = Q, E) when is_map(Map) ->
- { TT, TQ } = do_quote(maps:to_list(Map), Q, E),
- { { '%{}', [], TT }, TQ };
+ {TT, TQ} = do_quote(maps:to_list(Map), Q, E),
+ {{'%{}', [], TT}, TQ};
do_quote(Tuple, #elixir_quote{escape=true} = Q, E) when is_tuple(Tuple) ->
- { TT, TQ } = do_quote(tuple_to_list(Tuple), Q, E),
- { { '{}', [], TT }, TQ };
+ {TT, TQ} = do_quote(tuple_to_list(Tuple), Q, E),
+ {{'{}', [], TT}, TQ};
do_quote(List, #elixir_quote{escape=true} = Q, E) when is_list(List) ->
% The improper case is pretty inefficient, but improper lists are are.
case reverse_improper(List) of
- { L } -> do_splice(L, Q, E);
- { L, R } ->
- { TL, QL } = do_splice(L, Q, E, [], []),
- { TR, QR } = do_quote(R, QL, E),
- { update_last(TL, fun(X) -> { '|', [], [X, TR] } end), QR }
+ {L} -> do_splice(L, Q, E);
+ {L, R} ->
+ {TL, QL} = do_splice(L, Q, E, [], []),
+ {TR, QR} = do_quote(R, QL, E),
+ {update_last(TL, fun(X) -> {'|', [], [X, TR]} end), QR}
end;
do_quote(List, Q, E) when is_list(List) ->
do_splice(lists:reverse(List), Q, E);
do_quote(Other, Q, _) ->
- { Other, Q }.
+ {Other, Q}.
%% Quote helpers
do_quote_call(Left, Meta, Expr, Args, Q, E) ->
- All = [meta(Meta, Q), Left, { unquote, Meta, [Expr] }, Args,
+ All = [meta(Meta, Q), Left, {unquote, Meta, [Expr]}, Args,
Q#elixir_quote.context, file(E, Q)],
- { TAll, TQ } = lists:mapfoldl(fun(X, Acc) -> do_quote(X, Acc, E) end, Q, All),
- { { { '.', Meta, [elixir_quote, dot] }, Meta, TAll }, TQ }.
+ {TAll, TQ} = lists:mapfoldl(fun(X, Acc) -> do_quote(X, Acc, E) end, Q, All),
+ {{{'.', Meta, [elixir_quote, dot]}, Meta, TAll}, TQ}.
do_quote_fa(Target, Meta, Args, F, A, Q, E) ->
NewMeta =
case (keyfind(import_fa, Meta) == false) andalso
elixir_dispatch:find_import(Meta, F, A, E) of
false -> Meta;
- Receiver -> keystore(import_fa, Meta, { Receiver, Q#elixir_quote.context })
+ Receiver -> keystore(import_fa, Meta, {Receiver, Q#elixir_quote.context})
end,
- do_quote_tuple({ Target, NewMeta, Args }, Q, E).
+ do_quote_tuple({Target, NewMeta, Args}, Q, E).
-do_quote_tuple({ Left, Meta, Right }, Q, E) ->
- { TLeft, LQ } = do_quote(Left, Q, E),
- { TRight, RQ } = do_quote(Right, LQ, E),
- { { '{}', [], [TLeft, meta(Meta, Q), TRight] }, RQ }.
+do_quote_tuple({Left, Meta, Right}, Q, E) ->
+ {TLeft, LQ} = do_quote(Left, Q, E),
+ {TRight, RQ} = do_quote(Right, LQ, E),
+ {{'{}', [], [TLeft, meta(Meta, Q), TRight]}, RQ}.
file(#elixir_env{file=File}, #elixir_quote{keep=true}) -> File;
file(_, _) -> nil.
meta(Meta, #elixir_quote{keep=true}) ->
- [case KV of { line, V } -> { keep, V }; _ -> KV end || KV <- Meta];
+ [case KV of {line, V} -> {keep, V}; _ -> KV end || KV <- Meta];
meta(Meta, #elixir_quote{line=true}) ->
Meta;
meta(Meta, #elixir_quote{line=false}) ->
@@ -298,9 +298,9 @@ meta(Meta, #elixir_quote{line=Line}) ->
keystore(line, Meta, Line).
reverse_improper(L) -> reverse_improper(L, []).
-reverse_improper([], Acc) -> { Acc };
+reverse_improper([], Acc) -> {Acc};
reverse_improper([H|T], Acc) when is_list(T) -> reverse_improper(T, [H|Acc]);
-reverse_improper([H|T], Acc) -> { [H|Acc], T }.
+reverse_improper([H|T], Acc) -> {[H|Acc], T}.
update_last([], _) -> [];
update_last([H], F) -> [F(H)];
@@ -313,43 +313,43 @@ keydelete(Key, Meta) ->
keystore(_Key, Meta, nil) ->
Meta;
keystore(Key, Meta, Value) ->
- lists:keystore(Key, 1, Meta, { Key, Value }).
-keyreplace(Key, Meta, { Key, _V }) ->
+ lists:keystore(Key, 1, Meta, {Key, Value}).
+keyreplace(Key, Meta, {Key, _V}) ->
Meta;
keyreplace(Key, Meta, Tuple) ->
lists:keyreplace(Key, 1, Meta, Tuple).
keynew(Key, Meta, Value) ->
case keyfind(Key, Meta) of
- { Key, _ } -> Meta;
+ {Key, _} -> Meta;
_ -> keystore(Key, Meta, Value)
end.
%% Quote splicing
-do_splice([{ '|', Meta, [{ unquote_splicing, _, [Left] }, Right] }|T], #elixir_quote{unquote=true} = Q, E) ->
+do_splice([{'|', Meta, [{unquote_splicing, _, [Left]}, Right]}|T], #elixir_quote{unquote=true} = Q, E) ->
%% Process the remaining entries on the list.
%% For [1, 2, 3, unquote_splicing(arg)|tail], this will quote
%% 1, 2 and 3, which could even be unquotes.
- { TT, QT } = do_splice(T, Q, E, [], []),
- { TR, QR } = do_quote(Right, QT, E),
- { do_runtime_list(Meta, tail_list, [Left, TR, TT]), QR#elixir_quote{unquoted=true} };
+ {TT, QT} = do_splice(T, Q, E, [], []),
+ {TR, QR} = do_quote(Right, QT, E),
+ {do_runtime_list(Meta, tail_list, [Left, TR, TT]), QR#elixir_quote{unquoted=true}};
do_splice(List, Q, E) ->
do_splice(List, Q, E, [], []).
-do_splice([{ unquote_splicing, Meta, [Expr] }|T], #elixir_quote{unquote=true} = Q, E, Buffer, Acc) ->
+do_splice([{unquote_splicing, Meta, [Expr]}|T], #elixir_quote{unquote=true} = Q, E, Buffer, Acc) ->
do_splice(T, Q#elixir_quote{unquoted=true}, E, [], do_runtime_list(Meta, list, [Expr, do_join(Buffer, Acc)]));
do_splice([H|T], Q, E, Buffer, Acc) ->
- { TH, TQ } = do_quote(H, Q, E),
+ {TH, TQ} = do_quote(H, Q, E),
do_splice(T, TQ, E, [TH|Buffer], Acc);
do_splice([], Q, _E, Buffer, Acc) ->
- { do_join(Buffer, Acc), Q }.
+ {do_join(Buffer, Acc), Q}.
do_join(Left, []) -> Left;
do_join([], Right) -> Right;
-do_join(Left, Right) -> { { '.', [], [erlang, '++'] }, [], [Left, Right] }.
+do_join(Left, Right) -> {{'.', [], [erlang, '++']}, [], [Left, Right]}.
do_runtime_list(Meta, Fun, Args) ->
- { { '.', Meta, [elixir_quote, Fun] }, Meta, Args }.
+ {{'.', Meta, [elixir_quote, Fun]}, Meta, Args}.
diff --git a/lib/elixir/src/elixir_scope.erl b/lib/elixir/src/elixir_scope.erl
index b3ec95981..6940c0dbf 100644
--- a/lib/elixir/src/elixir_scope.erl
+++ b/lib/elixir/src/elixir_scope.erl
@@ -11,11 +11,11 @@
translate_var(Meta, Name, Kind, S) when is_atom(Kind); is_integer(Kind) ->
Line = ?line(Meta),
- Tuple = { Name, Kind },
+ Tuple = {Name, Kind},
Vars = S#elixir_scope.vars,
- case orddict:find({ Name, Kind }, Vars) of
- { ok, { Current, _ } } -> Exists = true;
+ case orddict:find({Name, Kind}, Vars) of
+ {ok, {Current, _}} -> Exists = true;
error -> Current = nil, Exists = false
end,
@@ -25,42 +25,42 @@ translate_var(Meta, Name, Kind, S) when is_atom(Kind); is_integer(Kind) ->
case Exists andalso ordsets:is_element(Tuple, MatchVars) of
true ->
- { { var, Line, Current }, S };
+ {{var, Line, Current}, S};
false ->
%% If the variable is not exported, we use a counter name.
%% The same if the variable already exists or we are in a
%% noname context.
- Private = (lists:keyfind(export, 1, Meta) == { export, false }),
+ Private = (lists:keyfind(export, 1, Meta) == {export, false}),
- { NewVar, Counter, NS } =
+ {NewVar, Counter, NS} =
if
Kind /= nil ->
build_var('_', S);
Private orelse Exists orelse S#elixir_scope.noname ->
build_var(Name, S);
true ->
- { Name, 0, S }
+ {Name, 0, S}
end,
FS = NS#elixir_scope{
- vars=orddict:store(Tuple, { NewVar, Counter }, Vars),
+ vars=orddict:store(Tuple, {NewVar, Counter}, Vars),
match_vars=ordsets:add_element(Tuple, MatchVars),
export_vars=case S#elixir_scope.export_vars of
EV when Private; EV == nil -> EV;
- EV -> orddict:store(Tuple, { NewVar, Counter }, EV)
+ EV -> orddict:store(Tuple, {NewVar, Counter}, EV)
end
- },
+ },
- { { var, Line, NewVar }, FS }
+ {{var, Line, NewVar}, FS}
end;
_ when Exists ->
- { { var, Line, Current }, S }
+ {{var, Line, Current}, S}
end.
build_var(Key, S) ->
New = orddict:update_counter(Key, 1, S#elixir_scope.counter),
Cnt = orddict:fetch(Key, New),
- { ?atom_concat([Key, "@", Cnt]), Cnt, S#elixir_scope{counter=New} }.
+ {?atom_concat([Key, "@", Cnt]), Cnt, S#elixir_scope{counter=New}}.
%% SCOPE MERGING
@@ -71,7 +71,7 @@ mergev(S1, S2) ->
S2#elixir_scope{
vars=merge_vars(S1#elixir_scope.vars, S2#elixir_scope.vars),
export_vars=merge_opt_vars(S1#elixir_scope.export_vars, S2#elixir_scope.export_vars)
- }.
+ }.
%% Receives two scopes and return the first scope with
%% counters and flags from the later.
@@ -81,7 +81,7 @@ mergec(S1, S2) ->
counter=S2#elixir_scope.counter,
super=S2#elixir_scope.super,
caller=S2#elixir_scope.caller
- }.
+ }.
%% Similar to mergec but does not merge the user vars counter.
@@ -89,7 +89,7 @@ mergef(S1, S2) ->
S1#elixir_scope{
super=S2#elixir_scope.super,
caller=S2#elixir_scope.caller
- }.
+ }.
%% Mergers.
@@ -103,34 +103,34 @@ merge_opt_vars(C, C) -> C;
merge_opt_vars(C1, C2) ->
orddict:merge(fun var_merger/3, C1, C2).
-var_merger(_Var, { _, V1 } = K1, { _, V2 }) when V1 > V2 -> K1;
+var_merger(_Var, {_, V1} = K1, {_, V2}) when V1 > V2 -> K1;
var_merger(_Var, _K1, K2) -> K2.
%% BINDINGS
load_binding(Binding, Scope) ->
- { NewBinding, NewVars, NewCounter } = load_binding(Binding, [], [], 0),
- { NewBinding, Scope#elixir_scope{
+ {NewBinding, NewVars, NewCounter} = load_binding(Binding, [], [], 0),
+ {NewBinding, Scope#elixir_scope{
vars=NewVars,
counter=[{'_',NewCounter}]
- } }.
+ }}.
load_binding([{Key,Value}|T], Binding, Vars, Counter) ->
Actual = case Key of
- { _Name, _Kind } -> Key;
- Name when is_atom(Name) -> { Name, nil }
+ {_Name, _Kind} -> Key;
+ Name when is_atom(Name) -> {Name, nil}
end,
InternalName = ?atom_concat(["_@", Counter]),
load_binding(T,
orddict:store(InternalName, Value, Binding),
- orddict:store(Actual, { InternalName, 0 }, Vars), Counter + 1);
+ orddict:store(Actual, {InternalName, 0}, Vars), Counter + 1);
load_binding([], Binding, Vars, Counter) ->
- { Binding, Vars, Counter }.
+ {Binding, Vars, Counter}.
dump_binding(Binding, #elixir_scope{vars=Vars}) ->
dump_binding(Vars, Binding, []).
-dump_binding([{{ Var, Kind } = Key, { InternalName,_ }}|T], Binding, Acc) when is_atom(Kind) ->
+dump_binding([{{Var, Kind} = Key, {InternalName,_}}|T], Binding, Acc) when is_atom(Kind) ->
Actual = case Kind of
nil -> Var;
_ -> Key
diff --git a/lib/elixir/src/elixir_sup.erl b/lib/elixir/src/elixir_sup.erl
index e43709194..80767df73 100644
--- a/lib/elixir/src/elixir_sup.erl
+++ b/lib/elixir/src/elixir_sup.erl
@@ -9,23 +9,23 @@ init(ok) ->
Workers = [
{
elixir_code_server,
- { elixir_code_server, start_link, [] },
+ {elixir_code_server, start_link, []},
permanent, % Restart = permanent | transient | temporary
2000, % Shutdown = brutal_kill | int() >= 0 | infinity
worker, % Type = worker | supervisor
[elixir_code_server] % Modules = [Module] | dynamic
- },
+ },
{
elixir_counter,
- { elixir_counter, start_link, [] },
+ {elixir_counter, start_link, []},
permanent, % Restart = permanent | transient | temporary
2000, % Shutdown = brutal_kill | int() >= 0 | infinity
worker, % Type = worker | supervisor
[elixir_counter] % Modules = [Module] | dynamic
- }
+ }
],
{ok, {{one_for_one, 3, 10}, Workers}}.
diff --git a/lib/elixir/src/elixir_tokenizer.erl b/lib/elixir/src/elixir_tokenizer.erl
index 5257e2e76..0062b2a9a 100644
--- a/lib/elixir/src/elixir_tokenizer.erl
+++ b/lib/elixir/src/elixir_tokenizer.erl
@@ -89,17 +89,17 @@ tokenize(String, Line, #elixir_tokenizer{} = Scope) ->
tokenize(String, Line, Opts) ->
File = case lists:keyfind(file, 1, Opts) of
- { file, V1 } -> V1;
+ {file, V1} -> V1;
false -> <<"nofile">>
end,
Existing = case lists:keyfind(existing_atoms_only, 1, Opts) of
- { existing_atoms_only, true } -> true;
+ {existing_atoms_only, true} -> true;
false -> false
end,
Check = case lists:keyfind(check_terminators, 1, Opts) of
- { check_terminators, false } -> false;
+ {check_terminators, false} -> false;
false -> true
end,
@@ -107,29 +107,29 @@ tokenize(String, Line, Opts) ->
file=File,
existing_atoms_only=Existing,
check_terminators=Check
- }).
+ }).
tokenize([], Line, #elixir_tokenizer{terminators=[]}, Tokens) ->
- { ok, Line, lists:reverse(Tokens) };
+ {ok, Line, lists:reverse(Tokens)};
-tokenize([], EndLine, #elixir_tokenizer{terminators=[{ Start, StartLine }|_]}, Tokens) ->
+tokenize([], EndLine, #elixir_tokenizer{terminators=[{Start, StartLine}|_]}, Tokens) ->
End = terminator(Start),
Message = io_lib:format("missing terminator: ~ts (for \"~ts\" starting at line ~B)", [End, Start, StartLine]),
- { error, { EndLine, Message, [] }, [], Tokens };
+ {error, {EndLine, Message, []}, [], Tokens};
% Base integers
tokenize([$0,X,H|T], Line, Scope, Tokens) when (X == $x orelse X == $X), ?is_hex(H) ->
- { Rest, Number } = tokenize_hex([H|T], []),
- tokenize(Rest, Line, Scope, [{ number, Line, Number }|Tokens]);
+ {Rest, Number} = tokenize_hex([H|T], []),
+ tokenize(Rest, Line, Scope, [{number, Line, Number}|Tokens]);
tokenize([$0,B,H|T], Line, Scope, Tokens) when (B == $b orelse B == $B), ?is_bin(H) ->
- { Rest, Number } = tokenize_bin([H|T], []),
- tokenize(Rest, Line, Scope, [{ number, Line, Number }|Tokens]);
+ {Rest, Number} = tokenize_bin([H|T], []),
+ tokenize(Rest, Line, Scope, [{number, Line, Number}|Tokens]);
tokenize([$0,H|T], Line, Scope, Tokens) when ?is_octal(H) ->
- { Rest, Number } = tokenize_octal([H|T], []),
- tokenize(Rest, Line, Scope, [{ number, Line, Number }|Tokens]);
+ {Rest, Number} = tokenize_octal([H|T], []),
+ tokenize(Rest, Line, Scope, [{number, Line, Number}|Tokens]);
% Comments
@@ -141,19 +141,19 @@ tokenize([$#|String], Line, Scope, Tokens) ->
tokenize([$~,S,H,H,H|T] = Original, Line, Scope, Tokens) when ?is_quote(H), ?is_upcase(S) orelse ?is_downcase(S) ->
case extract_heredoc_with_interpolation(Line, Scope, ?is_downcase(S), T, H) of
- { ok, NewLine, Parts, Rest } ->
- { Final, Modifiers } = collect_modifiers(Rest, []),
- tokenize(Final, NewLine, Scope, [{ sigil, Line, S, Parts, Modifiers }|Tokens]);
- { error, Reason } ->
- { error, Reason, Original, Tokens }
+ {ok, NewLine, Parts, Rest} ->
+ {Final, Modifiers} = collect_modifiers(Rest, []),
+ tokenize(Final, NewLine, Scope, [{sigil, Line, S, Parts, Modifiers}|Tokens]);
+ {error, Reason} ->
+ {error, Reason, Original, Tokens}
end;
tokenize([$~,S,H|T] = Original, Line, Scope, Tokens) when ?is_sigil(H), ?is_upcase(S) orelse ?is_downcase(S) ->
case elixir_interpolation:extract(Line, Scope, ?is_downcase(S), T, sigil_terminator(H)) of
- { NewLine, Parts, Rest } ->
- { Final, Modifiers } = collect_modifiers(Rest, []),
- tokenize(Final, NewLine, Scope, [{ sigil, Line, S, Parts, Modifiers }|Tokens]);
- { error, Reason } ->
+ {NewLine, Parts, Rest} ->
+ {Final, Modifiers} = collect_modifiers(Rest, []),
+ tokenize(Final, NewLine, Scope, [{sigil, Line, S, Parts, Modifiers}|Tokens]);
+ {error, Reason} ->
Sigil = [$~,S,H],
interpolation_error(Reason, Original, Tokens, " (for sigil ~ts starting at line ~B)", [Sigil, Line])
end;
@@ -162,54 +162,54 @@ tokenize([$~,S,H|T] = Original, Line, Scope, Tokens) when ?is_sigil(H), ?is_upca
tokenize([$?,$\\,P,${,A,B,C,D,E,F,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) ->
Char = escape_char([$\\,P,${,A,B,C,D,E,F,$}]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,P,${,A,B,C,D,E,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) ->
Char = escape_char([$\\,P,${,A,B,C,D,E,$}]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,P,${,A,B,C,D,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) ->
Char = escape_char([$\\,P,${,A,B,C,D,$}]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,P,${,A,B,C,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B), ?is_hex(C) ->
Char = escape_char([$\\,P,${,A,B,C,$}]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,P,${,A,B,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) ->
Char = escape_char([$\\,P,${,A,B,$}]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,P,${,A,$}|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A) ->
Char = escape_char([$\\,P,${,A,$}]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,P,A,B|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A), ?is_hex(B) ->
Char = escape_char([$\\,P,A,B]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,P,A|T], Line, Scope, Tokens) when (P == $x orelse P == $X), ?is_hex(A) ->
Char = escape_char([$\\,P,A]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,A,B,C|T], Line, Scope, Tokens) when ?is_octal(A), A =< $3,?is_octal(B), ?is_octal(C) ->
Char = escape_char([$\\,A,B,C]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,A,B|T], Line, Scope, Tokens) when ?is_octal(A), ?is_octal(B) ->
Char = escape_char([$\\,A,B]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,A|T], Line, Scope, Tokens) when ?is_octal(A) ->
Char = escape_char([$\\,A]),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,$\\,H|T], Line, Scope, Tokens) ->
Char = elixir_interpolation:unescape_map(H),
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
tokenize([$?,Char|T], Line, Scope, Tokens) ->
- tokenize(T, Line, Scope, [{ number, Line, Char }|Tokens]);
+ tokenize(T, Line, Scope, [{number, Line, Char}|Tokens]);
% Heredocs
@@ -230,64 +230,64 @@ tokenize([$'|T], Line, Scope, Tokens) ->
tokenize([$:,H|T] = Original, Line, Scope, Tokens) when ?is_quote(H) ->
case elixir_interpolation:extract(Line, Scope, true, T, H) of
- { NewLine, Parts, Rest } ->
+ {NewLine, Parts, Rest} ->
Unescaped = unescape_tokens(Parts),
ExistingAtomsOnly = Scope#elixir_tokenizer.existing_atoms_only,
- tokenize(Rest, NewLine, Scope, [{ atom_string, Line, ExistingAtomsOnly, Unescaped }|Tokens]);
- { error, Reason } ->
+ tokenize(Rest, NewLine, Scope, [{atom_string, Line, ExistingAtomsOnly, Unescaped}|Tokens]);
+ {error, Reason} ->
interpolation_error(Reason, Original, Tokens, " (for atom starting at line ~B)", [Line])
end;
tokenize([$:,T|String] = Original, Line, Scope, Tokens) when ?is_atom_start(T) ->
- { Rest, Part } = tokenize_atom([T|String], []),
+ {Rest, Part} = tokenize_atom([T|String], []),
case unsafe_to_atom(Part, Line, Scope) of
- { ok, Atom } ->
- tokenize(Rest, Line, Scope, [{ atom, Line, Atom }|Tokens]);
- { error, Reason } ->
- { error, Reason, Original, Tokens }
+ {ok, Atom} ->
+ tokenize(Rest, Line, Scope, [{atom, Line, Atom}|Tokens]);
+ {error, Reason} ->
+ {error, Reason, Original, Tokens}
end;
% %% Special atom identifiers / operators
tokenize(":..." ++ Rest, Line, Scope, Tokens) ->
- tokenize(Rest, Line, Scope, [{ atom, Line, '...' }|Tokens]);
+ tokenize(Rest, Line, Scope, [{atom, Line, '...'}|Tokens]);
tokenize(":<<>>" ++ Rest, Line, Scope, Tokens) ->
- tokenize(Rest, Line, Scope, [{ atom, Line, '<<>>' }|Tokens]);
+ tokenize(Rest, Line, Scope, [{atom, Line, '<<>>'}|Tokens]);
tokenize(":%{}" ++ Rest, Line, Scope, Tokens) ->
- tokenize(Rest, Line, Scope, [{ atom, Line, '%{}' }|Tokens]);
+ tokenize(Rest, Line, Scope, [{atom, Line, '%{}'}|Tokens]);
tokenize(":%" ++ Rest, Line, Scope, Tokens) ->
- tokenize(Rest, Line, Scope, [{ atom, Line, '%' }|Tokens]);
+ tokenize(Rest, Line, Scope, [{atom, Line, '%'}|Tokens]);
tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when ?container(T1, T2) ->
- tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T1,T2]) }|Tokens]);
+ tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2])}|Tokens]);
tokenize("...:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) ->
- tokenize(Rest, Line, Scope, [{ kw_identifier, Line, '...' }|Tokens]);
+ tokenize(Rest, Line, Scope, [{kw_identifier, Line, '...'}|Tokens]);
tokenize("<<>>:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) ->
- tokenize(Rest, Line, Scope, [{ kw_identifier, Line, '<<>>' }|Tokens]);
+ tokenize(Rest, Line, Scope, [{kw_identifier, Line, '<<>>'}|Tokens]);
tokenize("%{}:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) ->
- tokenize(Rest, Line, Scope, [{ kw_identifier, Line, '%{}' }|Tokens]);
+ tokenize(Rest, Line, Scope, [{kw_identifier, Line, '%{}'}|Tokens]);
tokenize("%:" ++ Rest, Line, Scope, Tokens) when ?is_space(hd(Rest)) ->
- tokenize(Rest, Line, Scope, [{ kw_identifier, Line, '%' }|Tokens]);
+ tokenize(Rest, Line, Scope, [{kw_identifier, Line, '%'}|Tokens]);
tokenize([T1,T2,$:|Rest], Line, Scope, Tokens) when ?container(T1, T2), ?is_space(hd(Rest)) ->
- tokenize(Rest, Line, Scope, [{ kw_identifier, Line, list_to_atom([T1,T2]) }|Tokens]);
+ tokenize(Rest, Line, Scope, [{kw_identifier, Line, list_to_atom([T1,T2])}|Tokens]);
% ## Three Token Operators
tokenize([$:,T1,T2,T3|Rest], Line, Scope, Tokens) when
?unary_op3(T1, T2, T3); ?comp_op3(T1, T2, T3); ?and_op3(T1, T2, T3); ?or_op3(T1, T2, T3);
?arrow_op3(T1, T2, T3); ?exp_op3(T1, T2, T3) ->
- tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T1,T2,T3]) }|Tokens]);
+ tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2,T3])}|Tokens]);
% ## Two Token Operators
tokenize([$:,T1,T2|Rest], Line, Scope, Tokens) when
?comp_op2(T1, T2); ?and_op(T1, T2); ?or_op(T1, T2); ?arrow_op(T1, T2);
?in_match_op(T1, T2); ?two_op(T1, T2); ?stab_op(T1, T2); ?type_op(T1, T2) ->
- tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T1,T2]) }|Tokens]);
+ tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T1,T2])}|Tokens]);
% ## Single Token Operators
tokenize([$:,T|Rest], Line, Scope, Tokens) when
?at_op(T); ?unary_op(T); ?dual_op(T); ?mult_op(T); ?comp_op(T);
?match_op(T); ?pipe_op(T); T == $. ->
- tokenize(Rest, Line, Scope, [{ atom, Line, list_to_atom([T]) }|Tokens]);
+ tokenize(Rest, Line, Scope, [{atom, Line, list_to_atom([T])}|Tokens]);
% End of line
@@ -336,16 +336,16 @@ tokenize([T1,T2,T3|Rest], Line, Scope, Tokens) when ?exp_op3(T1, T2, T3) ->
% ## Containers + punctuation tokens
tokenize([T,T|Rest], Line, Scope, Tokens) when T == $<; T == $> ->
- Token = { list_to_atom([T,T]), Line },
+ Token = {list_to_atom([T,T]), Line},
handle_terminator(Rest, Line, Scope, Token, Tokens);
tokenize([T|Rest], Line, Scope, Tokens) when T == $(;
T == ${; T == $}; T == $[; T == $]; T == $); T == $, ->
- Token = { list_to_atom([T]), Line },
+ Token = {list_to_atom([T]), Line},
handle_terminator(Rest, Line, Scope, Token, Tokens);
tokenize("=>" ++ Rest, Line, Scope, Tokens) ->
- tokenize(Rest, Line, Scope, add_token_with_nl({ assoc_op, Line, '=>' }, Tokens));
+ tokenize(Rest, Line, Scope, add_token_with_nl({assoc_op, Line, '=>'}, Tokens));
% ## Two Token Operators
tokenize([T1,T2|Rest], Line, Scope, Tokens) when ?two_op(T1, T2) ->
@@ -399,65 +399,65 @@ tokenize([T|Rest], Line, Scope, Tokens) when ?pipe_op(T) ->
tokenize([$%|T], Line, Scope, Tokens) ->
case strip_space(T, 0) of
- { [${|_] = Rest, Counter } -> tokenize(Rest, Line + Counter, Scope, [{ '%{}', Line }|Tokens]);
- { Rest, Counter } -> tokenize(Rest, Line + Counter, Scope, [{ '%', Line }|Tokens])
+ {[${|_] = Rest, Counter} -> tokenize(Rest, Line + Counter, Scope, [{'%{}', Line}|Tokens]);
+ {Rest, Counter} -> tokenize(Rest, Line + Counter, Scope, [{'%', Line}|Tokens])
end;
tokenize([$.|T], Line, Scope, Tokens) ->
- { Rest, Counter } = strip_space(T, 0),
+ {Rest, Counter} = strip_space(T, 0),
handle_dot([$.|Rest], Line + Counter, Scope, Tokens);
% Integers and floats
tokenize([H|_] = String, Line, Scope, Tokens) when ?is_digit(H) ->
- { Rest, Number } = tokenize_number(String, [], false),
- tokenize(Rest, Line, Scope, [{ number, Line, Number }|Tokens]);
+ {Rest, Number} = tokenize_number(String, [], false),
+ tokenize(Rest, Line, Scope, [{number, Line, Number}|Tokens]);
% Aliases
tokenize([H|_] = Original, Line, Scope, Tokens) when ?is_upcase(H) ->
- { Rest, Alias } = tokenize_identifier(Original, []),
+ {Rest, Alias} = tokenize_identifier(Original, []),
case unsafe_to_atom(Alias, Line, Scope) of
- { ok, Atom } ->
+ {ok, Atom} ->
case Rest of
[$:|T] when ?is_space(hd(T)) ->
- tokenize(T, Line, Scope, [{ kw_identifier, Line, Atom }|Tokens]);
+ tokenize(T, Line, Scope, [{kw_identifier, Line, Atom}|Tokens]);
_ ->
- tokenize(Rest, Line, Scope, [{ aliases, Line, [Atom] }|Tokens])
+ tokenize(Rest, Line, Scope, [{aliases, Line, [Atom]}|Tokens])
end;
- { error, Reason } ->
- { error, Reason, Original, Tokens }
+ {error, Reason} ->
+ {error, Reason, Original, Tokens}
end;
% Identifier
tokenize([H|_] = String, Line, Scope, Tokens) when ?is_downcase(H); H == $_ ->
case tokenize_any_identifier(String, Line, Scope, Tokens) of
- { keyword, Rest, Check, T } ->
+ {keyword, Rest, Check, T} ->
handle_terminator(Rest, Line, Scope, Check, T);
- { identifier, Rest, Token } ->
+ {identifier, Rest, Token} ->
tokenize(Rest, Line, Scope, [Token|Tokens]);
- { error, _, _, _ } = Error ->
+ {error, _, _, _} = Error ->
Error
end;
% Ambiguous unary/binary operators tokens
-tokenize([Space, Sign, NotMarker|T], Line, Scope, [{ Identifier, _, _ } = H|Tokens]) when
+tokenize([Space, Sign, NotMarker|T], Line, Scope, [{Identifier, _, _} = H|Tokens]) when
?dual_op(Sign),
?is_horizontal_space(Space),
not(?is_space(NotMarker)),
NotMarker /= $(, NotMarker /= $+, NotMarker /= $-, NotMarker /= $>,
Identifier == identifier ->
Rest = [NotMarker|T],
- tokenize(Rest, Line, Scope, [{ dual_op, Line, list_to_atom([Sign]) }, setelement(1, H, op_identifier)|Tokens]);
+ tokenize(Rest, Line, Scope, [{dual_op, Line, list_to_atom([Sign])}, setelement(1, H, op_identifier)|Tokens]);
% Spaces
tokenize([T|Rest], Line, Scope, Tokens) when ?is_horizontal_space(T) ->
tokenize(strip_horizontal_space(Rest), Line, Scope, Tokens);
tokenize(T, Line, _Scope, Tokens) ->
- { error, { Line, "invalid token: ", until_eol(T) }, T, Tokens }.
+ {error, {Line, "invalid token: ", until_eol(T)}, T, Tokens}.
strip_horizontal_space([H|T]) when ?is_horizontal_space(H) ->
strip_horizontal_space(T);
@@ -468,7 +468,7 @@ strip_space(T, Counter) ->
case strip_horizontal_space(T) of
"\r\n" ++ Rest -> strip_space(Rest, Counter + 1);
"\n" ++ Rest -> strip_space(Rest, Counter + 1);
- Rest -> { Rest, Counter }
+ Rest -> {Rest, Counter}
end.
until_eol("\r\n" ++ _) -> [];
@@ -484,42 +484,42 @@ escape_char(List) ->
handle_heredocs(T, Line, H, Scope, Tokens) ->
case extract_heredoc_with_interpolation(Line, Scope, true, T, H) of
- { ok, NewLine, Parts, Rest } ->
- Token = { string_type(H), Line, unescape_tokens(Parts) },
+ {ok, NewLine, Parts, Rest} ->
+ Token = {string_type(H), Line, unescape_tokens(Parts)},
tokenize(Rest, NewLine, Scope, [Token|Tokens]);
- { error, Reason } ->
- { error, Reason, [H, H, H] ++ T, Tokens }
+ {error, Reason} ->
+ {error, Reason, [H, H, H] ++ T, Tokens}
end.
handle_strings(T, Line, H, Scope, Tokens) ->
case elixir_interpolation:extract(Line, Scope, true, T, H) of
- { error, Reason } ->
+ {error, Reason} ->
interpolation_error(Reason, [H|T], Tokens, " (for string starting at line ~B)", [Line]);
- { NewLine, Parts, [$:|Rest] } when ?is_space(hd(Rest)) ->
+ {NewLine, Parts, [$:|Rest]} when ?is_space(hd(Rest)) ->
Unescaped = unescape_tokens(Parts),
ExistingAtomsOnly = Scope#elixir_tokenizer.existing_atoms_only,
- tokenize(Rest, NewLine, Scope, [{ kw_identifier_string, Line, ExistingAtomsOnly, Unescaped }|Tokens]);
- { NewLine, Parts, Rest } ->
- Token = { string_type(H), Line, unescape_tokens(Parts) },
+ tokenize(Rest, NewLine, Scope, [{kw_identifier_string, Line, ExistingAtomsOnly, Unescaped}|Tokens]);
+ {NewLine, Parts, Rest} ->
+ Token = {string_type(H), Line, unescape_tokens(Parts)},
tokenize(Rest, NewLine, Scope, [Token|Tokens])
end.
handle_unary_op([$:|Rest], Line, _Kind, Op, Scope, Tokens) when ?is_space(hd(Rest)) ->
- tokenize(Rest, Line, Scope, [{ kw_identifier, Line, Op }|Tokens]);
+ tokenize(Rest, Line, Scope, [{kw_identifier, Line, Op}|Tokens]);
handle_unary_op(Rest, Line, Kind, Op, Scope, Tokens) ->
case strip_horizontal_space(Rest) of
- [$/|_] -> tokenize(Rest, Line, Scope, [{ identifier, Line, Op }|Tokens]);
- _ -> tokenize(Rest, Line, Scope, [{ Kind, Line, Op }|Tokens])
+ [$/|_] -> tokenize(Rest, Line, Scope, [{identifier, Line, Op}|Tokens]);
+ _ -> tokenize(Rest, Line, Scope, [{Kind, Line, Op}|Tokens])
end.
handle_op([$:|Rest], Line, _Kind, Op, Scope, Tokens) when ?is_space(hd(Rest)) ->
- tokenize(Rest, Line, Scope, [{ kw_identifier, Line, Op }|Tokens]);
+ tokenize(Rest, Line, Scope, [{kw_identifier, Line, Op}|Tokens]);
handle_op(Rest, Line, Kind, Op, Scope, Tokens) ->
case strip_horizontal_space(Rest) of
- [$/|_] -> tokenize(Rest, Line, Scope, [{ identifier, Line, Op }|Tokens]);
- _ -> tokenize(Rest, Line, Scope, add_token_with_nl({ Kind, Line, Op }, Tokens))
+ [$/|_] -> tokenize(Rest, Line, Scope, [{identifier, Line, Op}|Tokens]);
+ _ -> tokenize(Rest, Line, Scope, add_token_with_nl({Kind, Line, Op}, Tokens))
end.
% ## Three Token Operators
@@ -542,28 +542,28 @@ handle_dot([$.,T|Rest], Line, Scope, Tokens) when
% ## Exception for .( as it needs to be treated specially in the parser
handle_dot([$.,$(|Rest], Line, Scope, Tokens) ->
- tokenize([$(|Rest], Line, Scope, add_token_with_nl({ dot_call_op, Line, '.' }, Tokens));
+ tokenize([$(|Rest], Line, Scope, add_token_with_nl({dot_call_op, Line, '.'}, Tokens));
handle_dot([$.,H|T] = Original, Line, Scope, Tokens) when ?is_quote(H) ->
case elixir_interpolation:extract(Line, Scope, true, T, H) of
- { NewLine, [Part], Rest } when is_binary(Part) ->
+ {NewLine, [Part], Rest} when is_binary(Part) ->
case unsafe_to_atom(Part, Line, Scope) of
- { ok, Atom } ->
+ {ok, Atom} ->
Token = check_call_identifier(identifier, Line, Atom, Rest),
- tokenize(Rest, NewLine, Scope, [Token|add_token_with_nl({ '.', Line }, Tokens)]);
- { error, Reason } ->
- { error, Reason, Original, Tokens }
+ tokenize(Rest, NewLine, Scope, [Token|add_token_with_nl({'.', Line}, Tokens)]);
+ {error, Reason} ->
+ {error, Reason, Original, Tokens}
end;
- { error, Reason } ->
+ {error, Reason} ->
interpolation_error(Reason, Original, Tokens, " (for function name starting at line ~B)", [Line])
end;
handle_dot([$.|Rest], Line, Scope, Tokens) ->
- tokenize(Rest, Line, Scope, add_token_with_nl({ '.', Line }, Tokens)).
+ tokenize(Rest, Line, Scope, add_token_with_nl({'.', Line}, Tokens)).
handle_call_identifier(Rest, Line, Op, Scope, Tokens) ->
Token = check_call_identifier(identifier, Line, Op, Rest),
- tokenize(Rest, Line, Scope, [Token|add_token_with_nl({ '.', Line }, Tokens)]).
+ tokenize(Rest, Line, Scope, [Token|add_token_with_nl({'.', Line}, Tokens)]).
%% Helpers
@@ -574,55 +574,55 @@ eol(Line, Mod, Tokens) -> [{eol,Line,Mod}|Tokens].
unsafe_to_atom(Part, Line, #elixir_tokenizer{}) when
is_binary(Part) andalso size(Part) > 255;
is_list(Part) andalso length(Part) > 255 ->
- { error, { Line, "atom length must be less than system limit", ":" } };
+ {error, {Line, "atom length must be less than system limit", ":"}};
unsafe_to_atom(Binary, _Line, #elixir_tokenizer{existing_atoms_only=true}) when is_binary(Binary) ->
- { ok, binary_to_existing_atom(Binary, utf8) };
+ {ok, binary_to_existing_atom(Binary, utf8)};
unsafe_to_atom(Binary, _Line, #elixir_tokenizer{}) when is_binary(Binary) ->
- { ok, binary_to_atom(Binary, utf8) };
+ {ok, binary_to_atom(Binary, utf8)};
unsafe_to_atom(List, _Line, #elixir_tokenizer{existing_atoms_only=true}) when is_list(List) ->
- { ok, list_to_existing_atom(List) };
+ {ok, list_to_existing_atom(List)};
unsafe_to_atom(List, _Line, #elixir_tokenizer{}) when is_list(List) ->
- { ok, list_to_atom(List) }.
+ {ok, list_to_atom(List)}.
collect_modifiers([H|T], Buffer) when ?is_downcase(H) ->
collect_modifiers(T, [H|Buffer]);
collect_modifiers(Rest, Buffer) ->
- { Rest, lists:reverse(Buffer) }.
+ {Rest, lists:reverse(Buffer)}.
%% Heredocs
extract_heredoc_with_interpolation(Line, Scope, Interpol, T, H) ->
case extract_heredoc(Line, T, H) of
- { ok, NewLine, Body, Rest } ->
+ {ok, NewLine, Body, Rest} ->
case elixir_interpolation:extract(Line + 1, Scope, Interpol, Body, 0) of
- { error, Reason } ->
- { error, interpolation_format(Reason, " (for heredoc starting at line ~B)", [Line]) };
- { _, Parts, [] } ->
- { ok, NewLine, Parts, Rest }
+ {error, Reason} ->
+ {error, interpolation_format(Reason, " (for heredoc starting at line ~B)", [Line])};
+ {_, Parts, []} ->
+ {ok, NewLine, Parts, Rest}
end;
- { error, _ } = Error ->
+ {error, _} = Error ->
Error
end.
extract_heredoc(Line0, Rest0, Marker) ->
case extract_heredoc_header(Rest0) of
- { ok, Rest1 } ->
+ {ok, Rest1} ->
%% We prepend a new line so we can transparently remove
%% spaces later. This new line is removed by calling `tl`
%% in the final heredoc body three lines below.
case extract_heredoc_body(Line0, Marker, [$\n|Rest1], []) of
- { ok, Line1, Body, Rest2, Spaces } ->
- { ok, Line1, tl(remove_heredoc_spaces(Body, Spaces)), Rest2 };
- { error, ErrorLine } ->
+ {ok, Line1, Body, Rest2, Spaces} ->
+ {ok, Line1, tl(remove_heredoc_spaces(Body, Spaces)), Rest2};
+ {error, ErrorLine} ->
Terminator = [Marker, Marker, Marker],
Message = "missing terminator: ~ts (for heredoc starting at line ~B)",
- { error, { ErrorLine, io_lib:format(Message, [Terminator, Line0]), [] } }
+ {error, {ErrorLine, io_lib:format(Message, [Terminator, Line0]), []}}
end;
error ->
Terminator = [Marker, Marker, Marker],
Message = "heredoc start ~ts must be followed by a new line",
- { error, { Line0, io_lib:format(Message, [Terminator]), [] } }
+ {error, {Line0, io_lib:format(Message, [Terminator]), []}}
end.
%% Remove spaces from heredoc based on the position of the final quotes.
@@ -643,9 +643,9 @@ remove_heredoc_spaces([], Buffer, _Spaces, _Original) ->
%% Extract the heredoc header.
extract_heredoc_header("\r\n" ++ Rest) ->
- { ok, Rest };
+ {ok, Rest};
extract_heredoc_header("\n" ++ Rest) ->
- { ok, Rest };
+ {ok, Rest};
extract_heredoc_header([_|T]) ->
extract_heredoc_header(T);
extract_heredoc_header(_) ->
@@ -657,31 +657,31 @@ extract_heredoc_header(_) ->
extract_heredoc_body(Line, Marker, Rest, Buffer) ->
case extract_heredoc_line(Marker, Rest, Buffer, 0) of
- { ok, NewBuffer, NewRest } ->
+ {ok, NewBuffer, NewRest} ->
extract_heredoc_body(Line + 1, Marker, NewRest, NewBuffer);
- { ok, NewBuffer, NewRest, Spaces } ->
- { ok, Line, NewBuffer, NewRest, Spaces };
- { error, eof } ->
- { error, Line }
+ {ok, NewBuffer, NewRest, Spaces} ->
+ {ok, Line, NewBuffer, NewRest, Spaces};
+ {error, eof} ->
+ {error, Line}
end.
%% Extract a line from the heredoc prepending its contents to a buffer.
extract_heredoc_line("\r\n" ++ Rest, Buffer) ->
- { ok, [$\n|Buffer], Rest };
+ {ok, [$\n|Buffer], Rest};
extract_heredoc_line("\n" ++ Rest, Buffer) ->
- { ok, [$\n|Buffer], Rest };
+ {ok, [$\n|Buffer], Rest};
extract_heredoc_line([H|T], Buffer) ->
extract_heredoc_line(T, [H|Buffer]);
extract_heredoc_line(_, _) ->
- { error, eof }.
+ {error, eof}.
%% Extract each heredoc line trying to find a match according to the marker.
extract_heredoc_line(Marker, [H|T], Buffer, Counter) when ?is_horizontal_space(H) ->
extract_heredoc_line(Marker, T, [H|Buffer], Counter + 1);
extract_heredoc_line(Marker, [Marker,Marker,Marker|T], Buffer, Counter) ->
- { ok, Buffer, T, Counter };
+ {ok, Buffer, T, Counter};
extract_heredoc_line(_Marker, Rest, Buffer, _Counter) ->
extract_heredoc_line(Rest, Buffer).
@@ -710,20 +710,20 @@ tokenize_number([H|T], Acc, Bool) when ?is_digit(H) ->
%% Cast to float...
tokenize_number(Rest, Acc, true) ->
- { Rest, list_to_float(lists:reverse(Acc)) };
+ {Rest, list_to_float(lists:reverse(Acc))};
%% Or integer.
tokenize_number(Rest, Acc, false) ->
- { Rest, list_to_integer(lists:reverse(Acc)) }.
+ {Rest, list_to_integer(lists:reverse(Acc))}.
tokenize_hex([H|T], Acc) when ?is_hex(H) -> tokenize_hex(T, [H|Acc]);
-tokenize_hex(Rest, Acc) -> { Rest, list_to_integer(lists:reverse(Acc), 16) }.
+tokenize_hex(Rest, Acc) -> {Rest, list_to_integer(lists:reverse(Acc), 16)}.
tokenize_octal([H|T], Acc) when ?is_octal(H) -> tokenize_octal(T, [H|Acc]);
-tokenize_octal(Rest, Acc) -> { Rest, list_to_integer(lists:reverse(Acc), 8) }.
+tokenize_octal(Rest, Acc) -> {Rest, list_to_integer(lists:reverse(Acc), 8)}.
tokenize_bin([H|T], Acc) when ?is_bin(H) -> tokenize_bin(T, [H|Acc]);
-tokenize_bin(Rest, Acc) -> { Rest, list_to_integer(lists:reverse(Acc), 2) }.
+tokenize_bin(Rest, Acc) -> {Rest, list_to_integer(lists:reverse(Acc), 2)}.
%% Comments
@@ -739,10 +739,10 @@ tokenize_atom([H|T], Acc) when ?is_atom(H) ->
tokenize_atom(T, [H|Acc]);
tokenize_atom([H|T], Acc) when H == $?; H == $! ->
- { T, lists:reverse([H|Acc]) };
+ {T, lists:reverse([H|Acc])};
tokenize_atom(Rest, Acc) ->
- { Rest, lists:reverse(Acc) }.
+ {Rest, lists:reverse(Acc)}.
%% Identifiers
%% At this point, the validity of the first character was already verified.
@@ -751,49 +751,49 @@ tokenize_identifier([H|T], Acc) when ?is_identifier(H) ->
tokenize_identifier(T, [H|Acc]);
tokenize_identifier(Rest, Acc) ->
- { Rest, lists:reverse(Acc) }.
+ {Rest, lists:reverse(Acc)}.
%% Tokenize any identifier, handling kv, punctuated, paren, bracket and do identifiers.
tokenize_any_identifier(Original, Line, Scope, Tokens) ->
- { Rest, Identifier } = tokenize_identifier(Original, []),
+ {Rest, Identifier} = tokenize_identifier(Original, []),
- { AllIdentifier, AllRest } =
+ {AllIdentifier, AllRest} =
case Rest of
- [H|T] when H == $?; H == $! -> { Identifier ++ [H], T };
- _ -> { Identifier, Rest }
+ [H|T] when H == $?; H == $! -> {Identifier ++ [H], T};
+ _ -> {Identifier, Rest}
end,
case unsafe_to_atom(AllIdentifier, Line, Scope) of
- { ok, Atom } ->
+ {ok, Atom} ->
tokenize_kw_or_other(AllRest, identifier, Line, Atom, Tokens);
- { error, Reason } ->
- { error, Reason, Original, Tokens }
+ {error, Reason} ->
+ {error, Reason, Original, Tokens}
end.
tokenize_kw_or_other([$:,H|T], _Kind, Line, Atom, _Tokens) when ?is_space(H) ->
- { identifier, [H|T], { kw_identifier, Line, Atom } };
+ {identifier, [H|T], {kw_identifier, Line, Atom}};
tokenize_kw_or_other([$:,H|T], _Kind, Line, Atom, Tokens) when ?is_atom_start(H); ?is_digit(H) ->
Original = atom_to_list(Atom) ++ [$:],
- Reason = { Line, "keyword argument must be followed by space after: ", Original },
- { error, Reason, Original ++ [H|T], Tokens };
+ Reason = {Line, "keyword argument must be followed by space after: ", Original},
+ {error, Reason, Original ++ [H|T], Tokens};
tokenize_kw_or_other(Rest, Kind, Line, Atom, Tokens) ->
case check_keyword(Line, Atom, Tokens) of
nomatch ->
- { identifier, Rest, check_call_identifier(Kind, Line, Atom, Rest) };
- { ok, [Check|T] } ->
- { keyword, Rest, Check, T };
- { error, Token } ->
- { error, { Line, "syntax error before: ", Token }, atom_to_list(Atom) ++ Rest, Tokens }
+ {identifier, Rest, check_call_identifier(Kind, Line, Atom, Rest)};
+ {ok, [Check|T]} ->
+ {keyword, Rest, Check, T};
+ {error, Token} ->
+ {error, {Line, "syntax error before: ", Token}, atom_to_list(Atom) ++ Rest, Tokens}
end.
%% Check if it is a call identifier (paren | bracket | do)
-check_call_identifier(_Kind, Line, Atom, [$(|_]) -> { paren_identifier, Line, Atom };
-check_call_identifier(_Kind, Line, Atom, [$[|_]) -> { bracket_identifier, Line, Atom };
-check_call_identifier(Kind, Line, Atom, _Rest) -> { Kind, Line, Atom }.
+check_call_identifier(_Kind, Line, Atom, [$(|_]) -> {paren_identifier, Line, Atom};
+check_call_identifier(_Kind, Line, Atom, [$[|_]) -> {bracket_identifier, Line, Atom};
+check_call_identifier(Kind, Line, Atom, _Rest) -> {Kind, Line, Atom}.
add_token_with_nl(Left, [{eol,_,newline}|T]) -> [Left|T];
add_token_with_nl(Left, T) -> [Left|T].
@@ -801,19 +801,19 @@ add_token_with_nl(Left, T) -> [Left|T].
%% Error handling
interpolation_error(Reason, Rest, Tokens, Extension, Args) ->
- { error, interpolation_format(Reason, Extension, Args), Rest, Tokens }.
+ {error, interpolation_format(Reason, Extension, Args), Rest, Tokens}.
-interpolation_format({ string, Line, Message, Token }, Extension, Args) ->
- { Line, io_lib:format("~ts" ++ Extension, [Message|Args]), Token };
-interpolation_format({ _, _, _ } = Reason, _Extension, _Args) ->
+interpolation_format({string, Line, Message, Token}, Extension, Args) ->
+ {Line, io_lib:format("~ts" ++ Extension, [Message|Args]), Token};
+interpolation_format({_, _, _} = Reason, _Extension, _Args) ->
Reason.
%% Terminators
handle_terminator(Rest, Line, Scope, Token, Tokens) ->
case handle_terminator(Token, Scope) of
- { error, Reason } ->
- { error, Reason, atom_to_list(element(1, Token)) ++ Rest, Tokens };
+ {error, Reason} ->
+ {error, Reason, atom_to_list(element(1, Token)) ++ Rest, Tokens};
New ->
tokenize(Rest, Line, New, [Token|Tokens])
end.
@@ -822,14 +822,14 @@ handle_terminator(_, #elixir_tokenizer{check_terminators=false} = Scope) ->
Scope;
handle_terminator(Token, #elixir_tokenizer{terminators=Terminators} = Scope) ->
case check_terminator(Token, Terminators) of
- { error, _ } = Error -> Error;
+ {error, _} = Error -> Error;
New -> Scope#elixir_tokenizer{terminators=New}
end.
-check_terminator({ S, Line }, Terminators) when S == 'fn' ->
- [{ fn, Line }|Terminators];
+check_terminator({S, Line}, Terminators) when S == 'fn' ->
+ [{fn, Line}|Terminators];
-check_terminator({ S, _ } = New, Terminators) when
+check_terminator({S, _} = New, Terminators) when
S == 'do';
S == '(';
S == '[';
@@ -837,7 +837,7 @@ check_terminator({ S, _ } = New, Terminators) when
S == '<<' ->
[New|Terminators];
-check_terminator({ E, _ }, [{ S, _ }|Terminators]) when
+check_terminator({E, _}, [{S, _}|Terminators]) when
S == 'do', E == 'end';
S == 'fn', E == 'end';
S == '(', E == ')';
@@ -846,16 +846,16 @@ check_terminator({ E, _ }, [{ S, _ }|Terminators]) when
S == '<<', E == '>>' ->
Terminators;
-check_terminator({ E, Line }, [{ Start, StartLine }|_]) when
+check_terminator({E, Line}, [{Start, StartLine}|_]) when
E == 'end'; E == ')'; E == ']'; E == '}'; E == '>>' ->
End = terminator(Start),
Message = io_lib:format("\"~ts\" starting at line ~B is missing terminator \"~ts\". "
"Unexpected token: ", [Start, StartLine, End]),
- { error, { Line, Message, atom_to_list(E) } };
+ {error, {Line, Message, atom_to_list(E)}};
-check_terminator({ E, Line }, []) when
+check_terminator({E, Line}, []) when
E == 'end'; E == ')'; E == ']'; E == '}'; E == '>>' ->
- { error, { Line, "unexpected token: ", atom_to_list(E) } };
+ {error, {Line, "unexpected token: ", atom_to_list(E)}};
check_terminator(_, Terminators) ->
Terminators.
@@ -878,26 +878,26 @@ terminator('<<') -> '>>'.
%% Keywords checking
-check_keyword(_Line, _Atom, [{ '.', _ }|_]) ->
+check_keyword(_Line, _Atom, [{'.', _}|_]) ->
nomatch;
-check_keyword(DoLine, do, [{ Identifier, Line, Atom }|T]) when Identifier == identifier ->
- { ok, add_token_with_nl({ do, DoLine }, [{ do_identifier, Line, Atom }|T]) };
+check_keyword(DoLine, do, [{Identifier, Line, Atom}|T]) when Identifier == identifier ->
+ {ok, add_token_with_nl({do, DoLine}, [{do_identifier, Line, Atom}|T])};
check_keyword(Line, do, Tokens) ->
case do_keyword_valid(Tokens) of
- true -> { ok, add_token_with_nl({ do, Line }, Tokens) };
- false -> { error, "do" }
+ true -> {ok, add_token_with_nl({do, Line}, Tokens)};
+ false -> {error, "do"}
end;
check_keyword(Line, Atom, Tokens) ->
case keyword(Atom) of
false -> nomatch;
- token -> { ok, [{ Atom, Line }|Tokens] };
- block -> { ok, [{ block_identifier, Line, Atom }|Tokens] };
- unary_op -> { ok, [{ unary_op, Line, Atom }|Tokens] };
- Kind -> { ok, add_token_with_nl({ Kind, Line, Atom }, Tokens) }
+ token -> {ok, [{Atom, Line}|Tokens]};
+ block -> {ok, [{block_identifier, Line, Atom}|Tokens]};
+ unary_op -> {ok, [{unary_op, Line, Atom}|Tokens]};
+ Kind -> {ok, add_token_with_nl({Kind, Line, Atom}, Tokens)}
end.
%% do is only valid after the end, true, false and nil keywords
-do_keyword_valid([{ Atom, _ }|_]) ->
+do_keyword_valid([{Atom, _}|_]) ->
case Atom of
'end' -> true;
nil -> true;
diff --git a/lib/elixir/src/elixir_translator.erl b/lib/elixir/src/elixir_translator.erl
index c07f62782..958cfb400 100644
--- a/lib/elixir/src/elixir_translator.erl
+++ b/lib/elixir/src/elixir_translator.erl
@@ -8,66 +8,66 @@
%% =
-translate({ '=', Meta, [Left, Right] }, S) ->
+translate({'=', Meta, [Left, Right]}, S) ->
Return = case Left of
- { '_', _, Atom } when is_atom(Atom) -> false;
+ {'_', _, Atom} when is_atom(Atom) -> false;
_ -> true
end,
- { TRight, SR } = translate_block(Right, Return, S),
- { TLeft, SL } = elixir_clauses:match(fun translate/2, Left, SR),
- { { match, ?line(Meta), TLeft, TRight }, SL };
+ {TRight, SR} = translate_block(Right, Return, S),
+ {TLeft, SL} = elixir_clauses:match(fun translate/2, Left, SR),
+ {{match, ?line(Meta), TLeft, TRight}, SL};
%% Containers
-translate({ '{}', Meta, Args }, S) when is_list(Args) ->
- { TArgs, SE } = translate_args(Args, S),
- { { tuple, ?line(Meta), TArgs }, SE };
+translate({'{}', Meta, Args}, S) when is_list(Args) ->
+ {TArgs, SE} = translate_args(Args, S),
+ {{tuple, ?line(Meta), TArgs}, SE};
-translate({ '%{}', Meta, Args }, S) when is_list(Args) ->
+translate({'%{}', Meta, Args}, S) when is_list(Args) ->
elixir_map:translate_map(Meta, Args, S);
-translate({ '%', Meta, [Left, Right] }, S) ->
+translate({'%', Meta, [Left, Right]}, S) ->
elixir_map:translate_struct(Meta, Left, Right, S);
-translate({ '<<>>', Meta, Args }, S) when is_list(Args) ->
+translate({'<<>>', Meta, Args}, S) when is_list(Args) ->
elixir_bitstring:translate(Meta, Args, S);
%% Blocks
-translate({ '__block__', Meta, Args }, #elixir_scope{return=Return} = S) when is_list(Args) ->
- { TArgs, SA } = translate_block(Args, [], Return, S#elixir_scope{return=true}),
- { { block, ?line(Meta), TArgs }, SA };
+translate({'__block__', Meta, Args}, #elixir_scope{return=Return} = S) when is_list(Args) ->
+ {TArgs, SA} = translate_block(Args, [], Return, S#elixir_scope{return=true}),
+ {{block, ?line(Meta), TArgs}, SA};
%% Erlang op
-translate({ '__op__', Meta, [Op, Expr] }, S) when is_atom(Op) ->
- { TExpr, NS } = translate(Expr, S),
- { { op, ?line(Meta), Op, TExpr }, NS };
+translate({'__op__', Meta, [Op, Expr]}, S) when is_atom(Op) ->
+ {TExpr, NS} = translate(Expr, S),
+ {{op, ?line(Meta), Op, TExpr}, NS};
-translate({ '__op__', Meta, [Op, Left, Right] }, S) when is_atom(Op) ->
- { [TLeft, TRight], NS } = translate_args([Left, Right], S),
- { { op, ?line(Meta), Op, TLeft, TRight }, NS };
+translate({'__op__', Meta, [Op, Left, Right]}, S) when is_atom(Op) ->
+ {[TLeft, TRight], NS} = translate_args([Left, Right], S),
+ {{op, ?line(Meta), Op, TLeft, TRight}, NS};
%% Lexical
-translate({ Lexical, _, [_, _] }, S) when Lexical == import; Lexical == alias; Lexical == require ->
- { { atom, 0, nil }, S };
+translate({Lexical, _, [_, _]}, S) when Lexical == import; Lexical == alias; Lexical == require ->
+ {{atom, 0, nil}, S};
%% Pseudo variables
-translate({ '__CALLER__', Meta, Atom }, S) when is_atom(Atom) ->
- { { var, ?line(Meta), '__CALLER__' }, S#elixir_scope{caller=true} };
+translate({'__CALLER__', Meta, Atom}, S) when is_atom(Atom) ->
+ {{var, ?line(Meta), '__CALLER__'}, S#elixir_scope{caller=true}};
%% Functions
-translate({ '&', Meta, [{ '/', [], [{ Fun, [], Atom }, Arity] }] }, S)
+translate({'&', Meta, [{'/', [], [{Fun, [], Atom}, Arity]}]}, S)
when is_atom(Fun), is_atom(Atom), is_integer(Arity) ->
- { { 'fun', ?line(Meta), { function, Fun, Arity } }, S };
-translate({ '&', Meta, [Arg] }, S) when is_integer(Arg) ->
+ {{'fun', ?line(Meta), {function, Fun, Arity}}, S};
+translate({'&', Meta, [Arg]}, S) when is_integer(Arg) ->
compile_error(Meta, S#elixir_scope.file, "unhandled &~B outside of a capture", [Arg]);
-translate({ fn, Meta, Clauses }, S) ->
+translate({fn, Meta, Clauses}, S) ->
elixir_fn:translate(Meta, Clauses, S);
%% Case
@@ -75,76 +75,76 @@ translate({ fn, Meta, Clauses }, S) ->
translate({'case', Meta, [Expr, KV]}, #elixir_scope{return=Return} = RS) when is_list(KV) ->
S = RS#elixir_scope{noname=true, return=true},
Clauses = elixir_clauses:get_pairs(do, KV),
- { TExpr, NS } = translate(Expr, S),
+ {TExpr, NS} = translate(Expr, S),
RClauses = case elixir_utils:returns_boolean(TExpr) of
true -> rewrite_case_clauses(Clauses);
false -> Clauses
end,
- { TClauses, TS } = elixir_clauses:clauses(Meta, RClauses, Return, NS),
- { { 'case', ?line(Meta), TExpr, TClauses }, TS };
+ {TClauses, TS} = elixir_clauses:clauses(Meta, RClauses, Return, NS),
+ {{'case', ?line(Meta), TExpr, TClauses}, TS};
%% Try
translate({'try', Meta, [Clauses]}, #elixir_scope{return=Return} = RS) when is_list(Clauses) ->
S = RS#elixir_scope{noname=true, return=true},
Do = proplists:get_value('do', Clauses, nil),
- { TDo, SB } = elixir_translator:translate(Do, S),
+ {TDo, SB} = elixir_translator:translate(Do, S),
- Catch = [Tuple || { X, _ } = Tuple <- Clauses, X == 'rescue' orelse X == 'catch'],
- { TCatch, SC } = elixir_try:clauses(Meta, Catch, Return, mergec(S, SB)),
+ Catch = [Tuple || {X, _} = Tuple <- Clauses, X == 'rescue' orelse X == 'catch'],
+ {TCatch, SC} = elixir_try:clauses(Meta, Catch, Return, mergec(S, SB)),
case lists:keyfind('after', 1, Clauses) of
- { 'after', After } ->
- { TBlock, SA } = translate(After, mergec(S, SC)),
+ {'after', After} ->
+ {TBlock, SA} = translate(After, mergec(S, SC)),
TAfter = unblock(TBlock);
false ->
- { TAfter, SA } = { [], mergec(S, SC) }
+ {TAfter, SA} = {[], mergec(S, SC)}
end,
Else = elixir_clauses:get_pairs(else, Clauses),
- { TElse, SE } = elixir_clauses:clauses(Meta, Else, Return, mergec(S, SA)),
+ {TElse, SE} = elixir_clauses:clauses(Meta, Else, Return, mergec(S, SA)),
SF = (mergec(S, SE))#elixir_scope{noname=RS#elixir_scope.noname},
- { { 'try', ?line(Meta), unblock(TDo), TElse, TCatch, TAfter }, SF };
+ {{'try', ?line(Meta), unblock(TDo), TElse, TCatch, TAfter}, SF};
%% Receive
-translate({'receive', Meta, [KV] }, #elixir_scope{return=Return} = RS) when is_list(KV) ->
+translate({'receive', Meta, [KV]}, #elixir_scope{return=Return} = RS) when is_list(KV) ->
S = RS#elixir_scope{return=true},
Do = elixir_clauses:get_pairs(do, KV, true),
case lists:keyfind('after', 1, KV) of
false ->
- { TClauses, SC } = elixir_clauses:clauses(Meta, Do, Return, S),
- { { 'receive', ?line(Meta), TClauses }, SC };
+ {TClauses, SC} = elixir_clauses:clauses(Meta, Do, Return, S),
+ {{'receive', ?line(Meta), TClauses}, SC};
_ ->
After = elixir_clauses:get_pairs('after', KV),
- { TClauses, SC } = elixir_clauses:clauses(Meta, Do ++ After, Return, S),
- { FClauses, TAfter } = elixir_utils:split_last(TClauses),
- { _, _, [FExpr], _, FAfter } = TAfter,
- { { 'receive', ?line(Meta), FClauses, FExpr, FAfter }, SC }
+ {TClauses, SC} = elixir_clauses:clauses(Meta, Do ++ After, Return, S),
+ {FClauses, TAfter} = elixir_utils:split_last(TClauses),
+ {_, _, [FExpr], _, FAfter} = TAfter,
+ {{'receive', ?line(Meta), FClauses, FExpr, FAfter}, SC}
end;
%% Comprehensions
-translate({ Kind, Meta, Args }, S) when is_list(Args), (Kind == lc) orelse (Kind == bc) ->
+translate({Kind, Meta, Args}, S) when is_list(Args), (Kind == lc) orelse (Kind == bc) ->
translate_comprehension(Meta, Kind, Args, S);
-translate({ for, Meta, Args }, S) when is_list(Args) ->
+translate({for, Meta, Args}, S) when is_list(Args) ->
elixir_for:translate(Meta, Args, S);
%% Super
-translate({ super, Meta, Args }, S) when is_list(Args) ->
+translate({super, Meta, Args}, S) when is_list(Args) ->
Module = assert_module_scope(Meta, super, S),
Function = assert_function_scope(Meta, super, S),
elixir_def_overridable:ensure_defined(Meta, Module, Function, S),
- { _, Arity } = Function,
+ {_, Arity} = Function,
- { TArgs, TS } = if
+ {TArgs, TS} = if
length(Args) == Arity ->
translate_args(Args, S);
true ->
@@ -153,34 +153,34 @@ translate({ super, Meta, Args }, S) when is_list(Args) ->
end,
Super = elixir_def_overridable:name(Module, Function),
- { { call, ?line(Meta), { atom, ?line(Meta), Super }, TArgs }, TS#elixir_scope{super=true} };
+ {{call, ?line(Meta), {atom, ?line(Meta), Super}, TArgs}, TS#elixir_scope{super=true}};
%% Variables
-translate({ '^', Meta, [ { Name, VarMeta, Kind } ] }, #elixir_scope{context=match} = S) when is_atom(Name), is_atom(Kind) ->
- Tuple = { Name, var_kind(VarMeta, Kind) },
+translate({'^', Meta, [ {Name, VarMeta, Kind} ]}, #elixir_scope{context=match} = S) when is_atom(Name), is_atom(Kind) ->
+ Tuple = {Name, var_kind(VarMeta, Kind)},
case orddict:find(Tuple, S#elixir_scope.backup_vars) of
- { ok, { Value, _Counter } } ->
- { { var, ?line(Meta), Value }, S };
+ {ok, {Value, _Counter}} ->
+ {{var, ?line(Meta), Value}, S};
error ->
compile_error(Meta, S#elixir_scope.file, "unbound variable ^~ts", [Name])
end;
-translate({ '_', Meta, Kind }, #elixir_scope{context=match} = S) when is_atom(Kind) ->
- { { var, ?line(Meta), '_' }, S };
+translate({'_', Meta, Kind}, #elixir_scope{context=match} = S) when is_atom(Kind) ->
+ {{var, ?line(Meta), '_'}, S};
-translate({ '_', Meta, Kind }, S) when is_atom(Kind) ->
+translate({'_', Meta, Kind}, S) when is_atom(Kind) ->
compile_error(Meta, S#elixir_scope.file, "unbound variable _");
-translate({ Name, Meta, Kind }, #elixir_scope{extra=map_key} = S) when is_atom(Name), is_atom(Kind) ->
+translate({Name, Meta, Kind}, #elixir_scope{extra=map_key} = S) when is_atom(Name), is_atom(Kind) ->
compile_error(Meta, S#elixir_scope.file, "illegal use of variable ~ts in map key", [Name]);
-translate({ Name, Meta, Kind }, S) when is_atom(Name), is_atom(Kind) ->
+translate({Name, Meta, Kind}, S) when is_atom(Name), is_atom(Kind) ->
elixir_scope:translate_var(Meta, Name, var_kind(Meta, Kind), S);
%% Local calls
-translate({ Name, Meta, Args }, S) when is_atom(Name), is_list(Meta), is_list(Args) ->
+translate({Name, Meta, Args}, S) when is_atom(Name), is_list(Meta), is_list(Args) ->
if
S#elixir_scope.context == match ->
compile_error(Meta, S#elixir_scope.file,
@@ -198,20 +198,20 @@ translate({ Name, Meta, Args }, S) when is_atom(Name), is_list(Meta), is_list(Ar
compile_error(Meta, S#elixir_scope.file, "undefined function ~ts/~B", [Name, length(Args)]);
true ->
Line = ?line(Meta),
- { TArgs, NS } = translate_args(Args, S),
- { { call, Line, { atom, Line, Name }, TArgs }, NS }
+ {TArgs, NS} = translate_args(Args, S),
+ {{call, Line, {atom, Line, Name}, TArgs}, NS}
end;
%% Remote calls
-translate({ { '.', _, [Left, Right] }, Meta, Args }, S)
+translate({{'.', _, [Left, Right]}, Meta, Args}, S)
when (is_tuple(Left) orelse is_atom(Left)), is_atom(Right), is_list(Meta), is_list(Args) ->
- { TLeft, SL } = translate(Left, S),
- { TArgs, SA } = translate_args(Args, mergec(S, SL)),
+ {TLeft, SL} = translate(Left, S),
+ {TArgs, SA} = translate_args(Args, mergec(S, SL)),
Line = ?line(Meta),
Arity = length(Args),
- TRight = { atom, Line, Right },
+ TRight = {atom, Line, Right},
%% We need to rewrite erlang function calls as operators
%% because erl_eval chokes on them. We can remove this
@@ -220,46 +220,46 @@ translate({ { '.', _, [Left, Right] }, Meta, Args }, S)
%% [1,2,3] ++ Right in matches).
case (Left == erlang) andalso erl_op(Right, Arity) of
true ->
- { list_to_tuple([op, Line, Right] ++ TArgs), mergev(SL, SA) };
+ {list_to_tuple([op, Line, Right] ++ TArgs), mergev(SL, SA)};
false ->
assert_allowed_in_context(Meta, Left, Right, Arity, S),
SC = mergev(SL, SA),
case not is_atom(Left) andalso (Arity == 0) of
true ->
- { Var, _, SV } = elixir_scope:build_var('_', SC),
- TVar = { var, Line, Var },
- TMap = { tuple, Line, [
- { atom, Line, 'Elixir.KeyError' },
- { atom, Line, '__exception__' },
+ {Var, _, SV} = elixir_scope:build_var('_', SC),
+ TVar = {var, Line, Var},
+ TMap = {tuple, Line, [
+ {atom, Line, 'Elixir.KeyError'},
+ {atom, Line, '__exception__'},
TRight,
- TVar] },
+ TVar]},
- { { 'case', -1, TLeft, [
- { clause, -1,
- [{ map, Line, [{ map_field_exact, Line, TRight, TVar }] }],
+ {{'case', -1, TLeft, [
+ {clause, -1,
+ [{map, Line, [{map_field_exact, Line, TRight, TVar}]}],
[],
- [TVar] },
- { clause, -1,
- [{ match, Line, { map, Line, [] }, TVar }],
+ [TVar]},
+ {clause, -1,
+ [{match, Line, {map, Line, []}, TVar}],
[],
- [?wrap_call(Line, erlang, error, [TMap])] },
- { clause, -1,
+ [?wrap_call(Line, erlang, error, [TMap])]},
+ {clause, -1,
[TVar],
[],
- [{ call, Line, { remote, Line, TVar, TRight }, [] }] }
- ] }, SV };
+ [{call, Line, {remote, Line, TVar, TRight}, []}]}
+ ]}, SV};
false ->
- { { call, Line, { remote, Line, TLeft, TRight }, TArgs }, SC }
+ {{call, Line, {remote, Line, TLeft, TRight}, TArgs}, SC}
end
end;
%% Anonymous function calls
-translate({ { '.', _, [Expr] }, Meta, Args }, S) when is_list(Args) ->
- { TExpr, SE } = translate(Expr, S),
- { TArgs, SA } = translate_args(Args, mergec(S, SE)),
- { { call, ?line(Meta), TExpr, TArgs }, mergev(SE, SA) };
+translate({{'.', _, [Expr]}, Meta, Args}, S) when is_list(Args) ->
+ {TExpr, SE} = translate(Expr, S),
+ {TArgs, SA} = translate_args(Args, mergec(S, SE)),
+ {{call, ?line(Meta), TExpr, TArgs}, mergev(SE, SA)};
%% Literals
@@ -270,12 +270,12 @@ translate(List, S) when is_list(List) ->
end,
translate_list(List, Fun, S, []);
-translate({ Left, Right }, S) ->
- { TArgs, SE } = translate_args([Left, Right], S),
- { { tuple, 0, TArgs }, SE };
+translate({Left, Right}, S) ->
+ {TArgs, SE} = translate_args([Left, Right], S),
+ {{tuple, 0, TArgs}, SE};
translate(Other, S) ->
- { elixir_utils:elixir_to_erl(Other), S }.
+ {elixir_utils:elixir_to_erl(Other), S}.
%% Helpers
@@ -285,23 +285,23 @@ erl_op(Op, Arity) ->
erl_internal:bool_op(Op, Arity) orelse
erl_internal:arith_op(Op, Arity).
-translate_list([{ '|', _, [_, _]=Args}], Fun, Acc, List) ->
- { [TLeft,TRight], TAcc } = lists:mapfoldl(Fun, Acc, Args),
- { build_list([TLeft|List], TRight), TAcc };
+translate_list([{'|', _, [_, _]=Args}], Fun, Acc, List) ->
+ {[TLeft,TRight], TAcc} = lists:mapfoldl(Fun, Acc, Args),
+ {build_list([TLeft|List], TRight), TAcc};
translate_list([H|T], Fun, Acc, List) ->
- { TH, TAcc } = Fun(H, Acc),
+ {TH, TAcc} = Fun(H, Acc),
translate_list(T, Fun, TAcc, [TH|List]);
translate_list([], _Fun, Acc, List) ->
- { build_list(List, { nil, 0 }), Acc }.
+ {build_list(List, {nil, 0}), Acc}.
build_list([H|T], Acc) ->
- build_list(T, { cons, 0, H, Acc });
+ build_list(T, {cons, 0, H, Acc});
build_list([], Acc) ->
Acc.
var_kind(Meta, Kind) ->
case lists:keyfind(counter, 1, Meta) of
- { counter, Counter } -> Counter;
+ {counter, Counter} -> Counter;
false -> Kind
end.
@@ -321,17 +321,17 @@ rewrite_case_clauses(Clauses) ->
Clauses.
%% Pack a list of expressions from a block.
-unblock({ 'block', _, Exprs }) -> Exprs;
+unblock({'block', _, Exprs}) -> Exprs;
unblock(Expr) -> [Expr].
%% Translate args
translate_arg(Arg, Acc, S) when is_number(Arg); is_atom(Arg); is_binary(Arg); is_pid(Arg); is_function(Arg) ->
- { TArg, _ } = translate(Arg, S),
- { TArg, Acc };
+ {TArg, _} = translate(Arg, S),
+ {TArg, Acc};
translate_arg(Arg, Acc, S) ->
- { TArg, TAcc } = translate(Arg, mergec(S, Acc)),
- { TArg, mergev(Acc, TAcc) }.
+ {TArg, TAcc} = translate(Arg, mergec(S, Acc)),
+ {TArg, mergev(Acc, TAcc)}.
translate_args(Args, #elixir_scope{context=match} = S) ->
lists:mapfoldl(fun translate/2, S, Args);
@@ -342,12 +342,12 @@ translate_args(Args, S) ->
%% Translate blocks
translate_block([], Acc, _Return, S) ->
- { lists:reverse(Acc), S };
+ {lists:reverse(Acc), S};
translate_block([H], Acc, Return, S) ->
- { TH, TS } = translate_block(H, Return, S),
+ {TH, TS} = translate_block(H, Return, S),
translate_block([], [TH|Acc], Return, TS);
translate_block([H|T], Acc, Return, S) ->
- { TH, TS } = translate_block(H, false, S),
+ {TH, TS} = translate_block(H, false, S),
translate_block(T, [TH|Acc], Return, TS).
translate_block(Expr, Return, S) ->
@@ -358,22 +358,22 @@ translate_block(Expr, Return, S) ->
%% Expressions that can handle no return may receive
%% return=false but must always return return=true.
-handles_no_return({ 'try', _, [_] }) -> true;
-handles_no_return({ 'for', _, [_|_] }) -> true;
-handles_no_return({ 'case', _, [_, _] }) -> true;
-handles_no_return({ 'receive', _, [_] }) -> true;
-handles_no_return({ '__block__', _, [_|_] }) -> true;
+handles_no_return({'try', _, [_]}) -> true;
+handles_no_return({'for', _, [_|_]}) -> true;
+handles_no_return({'case', _, [_, _]}) -> true;
+handles_no_return({'receive', _, [_]}) -> true;
+handles_no_return({'__block__', _, [_|_]}) -> true;
handles_no_return(_) -> false.
%% Comprehensions
translate_comprehension(Meta, Kind, Args, S) ->
- { Cases, [{do,Expr}] } = elixir_utils:split_last(Args),
- { TCases, SC } = lists:mapfoldl(fun(C, Acc) -> translate_comprehension_clause(Meta, C, Acc) end, S, Cases),
- { TExpr, SE } = translate_comprehension_do(Meta, Kind, Expr, SC),
- { { Kind, ?line(Meta), TExpr, TCases }, mergef(S, SE) }.
+ {Cases, [{do,Expr}]} = elixir_utils:split_last(Args),
+ {TCases, SC} = lists:mapfoldl(fun(C, Acc) -> translate_comprehension_clause(Meta, C, Acc) end, S, Cases),
+ {TExpr, SE} = translate_comprehension_do(Meta, Kind, Expr, SC),
+ {{Kind, ?line(Meta), TExpr, TCases}, mergef(S, SE)}.
-translate_comprehension_do(_Meta, bc, { '<<>>', _, _ } = Expr, S) ->
+translate_comprehension_do(_Meta, bc, {'<<>>', _, _} = Expr, S) ->
translate(Expr, S);
translate_comprehension_do(Meta, bc, _Expr, S) ->
@@ -382,24 +382,24 @@ translate_comprehension_do(Meta, bc, _Expr, S) ->
translate_comprehension_do(_Meta, _Kind, Expr, S) ->
translate(Expr, S).
-translate_comprehension_clause(_Meta, {inbits, Meta, [{ '<<>>', _, _} = Left, Right]}, S) ->
- { TRight, SR } = translate(Right, S),
- { TLeft, SL } = elixir_clauses:match(fun elixir_translator:translate/2, Left, SR),
- { { b_generate, ?line(Meta), TLeft, TRight }, SL };
+translate_comprehension_clause(_Meta, {inbits, Meta, [{'<<>>', _, _} = Left, Right]}, S) ->
+ {TRight, SR} = translate(Right, S),
+ {TLeft, SL} = elixir_clauses:match(fun elixir_translator:translate/2, Left, SR),
+ {{b_generate, ?line(Meta), TLeft, TRight}, SL};
translate_comprehension_clause(_Meta, {inbits, Meta, [_Left, _Right]}, S) ->
compile_error(Meta, S#elixir_scope.file, "a bit comprehension expects a bit string << >> to be used in inbits generators");
translate_comprehension_clause(_Meta, {inlist, Meta, [Left, Right]}, S) ->
- { TRight, SR } = translate(Right, S),
- { TLeft, SL } = elixir_clauses:match(fun elixir_translator:translate/2, Left, SR),
- { { generate, ?line(Meta), TLeft, TRight }, SL };
+ {TRight, SR} = translate(Right, S),
+ {TLeft, SL} = elixir_clauses:match(fun elixir_translator:translate/2, Left, SR),
+ {{generate, ?line(Meta), TLeft, TRight}, SL};
translate_comprehension_clause(Meta, X, S) ->
Line = ?line(Meta),
- { TX, TS } = translate(X, S),
- { BX, BS } = elixir_utils:convert_to_boolean(Line, TX, true, TS),
- { { match, Line, { var, Line, '_' }, BX }, BS }.
+ {TX, TS} = translate(X, S),
+ {BX, BS} = elixir_utils:convert_to_boolean(Line, TX, true, TS),
+ {{match, Line, {var, Line, '_'}, BX}, BS}.
%% Assertions
diff --git a/lib/elixir/src/elixir_try.erl b/lib/elixir/src/elixir_try.erl
index 98d509e51..b69efb794 100644
--- a/lib/elixir/src/elixir_try.erl
+++ b/lib/elixir/src/elixir_try.erl
@@ -6,78 +6,78 @@ clauses(_Meta, Clauses, Return, S) ->
Catch = elixir_clauses:get_pairs('catch', Clauses),
Rescue = elixir_clauses:get_pairs(rescue, Clauses),
Transformer = fun(X, SAcc) ->
- { TX, TS } = each_clause(X, Return, SAcc),
- { TX, elixir_scope:mergec(S, TS) }
+ {TX, TS} = each_clause(X, Return, SAcc),
+ {TX, elixir_scope:mergec(S, TS)}
end,
lists:mapfoldl(Transformer, S, Rescue ++ Catch).
-each_clause({ 'catch', Meta, Raw, Expr }, Return, S) ->
- { Args, Guards } = elixir_clauses:extract_splat_guards(Raw),
+each_clause({'catch', Meta, Raw, Expr}, Return, S) ->
+ {Args, Guards} = elixir_clauses:extract_splat_guards(Raw),
Final = case Args of
- [X] -> [throw, X, { '_', Meta, nil }];
- [X,Y] -> [X, Y, { '_', Meta, nil }];
+ [X] -> [throw, X, {'_', Meta, nil}];
+ [X,Y] -> [X, Y, {'_', Meta, nil}];
_ ->
elixir_errors:compile_error(Meta, S#elixir_scope.file, "too many arguments given for catch")
end,
- Condition = [{ '{}', Meta, Final }],
+ Condition = [{'{}', Meta, Final}],
elixir_clauses:clause(?line(Meta), fun elixir_translator:translate_args/2,
Condition, Expr, Guards, Return, S);
-each_clause({ rescue, Meta, [{ in, _, [Left, Right]}], Expr }, Return, S) ->
+each_clause({rescue, Meta, [{in, _, [Left, Right]}], Expr}, Return, S) ->
case Left of
- { '_', _, LAtom } when is_atom(LAtom) ->
- { VarName, _, CS } = elixir_scope:build_var('_', S),
- { Clause, _ } = rescue_guards(Meta, { VarName, Meta, nil }, Right, S),
- each_clause({ 'catch', Meta, Clause, Expr }, Return, CS);
+ {'_', _, LAtom} when is_atom(LAtom) ->
+ {VarName, _, CS} = elixir_scope:build_var('_', S),
+ {Clause, _} = rescue_guards(Meta, {VarName, Meta, nil}, Right, S),
+ each_clause({'catch', Meta, Clause, Expr}, Return, CS);
_ ->
- { Clause, Safe } = rescue_guards(Meta, Left, Right, S),
+ {Clause, Safe} = rescue_guards(Meta, Left, Right, S),
case Safe of
true ->
- each_clause({ 'catch', Meta, Clause, Expr }, Return, S);
+ each_clause({'catch', Meta, Clause, Expr}, Return, S);
false ->
- { VarName, _, CS } = elixir_scope:build_var('_', S),
- ClauseVar = { VarName, Meta, nil },
- { FinalClause, _ } = rescue_guards(Meta, ClauseVar, Right, S),
- Match = { '=', Meta, [
+ {VarName, _, CS} = elixir_scope:build_var('_', S),
+ ClauseVar = {VarName, Meta, nil},
+ {FinalClause, _} = rescue_guards(Meta, ClauseVar, Right, S),
+ Match = {'=', Meta, [
Left,
- { { '.', Meta, ['Elixir.Exception', normalize] }, Meta, [error, ClauseVar] }
- ] },
+ {{'.', Meta, ['Elixir.Exception', normalize]}, Meta, [error, ClauseVar]}
+ ]},
FinalExpr = prepend_to_block(Meta, Match, Expr),
- each_clause({ 'catch', Meta, FinalClause, FinalExpr }, Return, CS)
+ each_clause({'catch', Meta, FinalClause, FinalExpr}, Return, CS)
end
end;
-each_clause({ rescue, Meta, _, _ }, _Return, S) ->
+each_clause({rescue, Meta, _, _}, _Return, S) ->
elixir_errors:compile_error(Meta, S#elixir_scope.file, "invalid arguments for rescue in try");
-each_clause({ Key, Meta, _, _ }, _Return, S) ->
+each_clause({Key, Meta, _, _}, _Return, S) ->
elixir_errors:compile_error(Meta, S#elixir_scope.file, "invalid key ~ts in try", [Key]).
%% Helpers
%% Convert rescue clauses into guards.
-rescue_guards(_, Var, { '_', _, _ }, _) -> { [error, Var], false };
+rescue_guards(_, Var, {'_', _, _}, _) -> {[error, Var], false};
rescue_guards(Meta, Var, Guards, S) ->
- { RawElixir, RawErlang } = rescue_each_var(Meta, Var, Guards),
- { Elixir, Erlang, Safe } = rescue_each_ref(Meta, Var, Guards, RawElixir, RawErlang, RawErlang == [], S),
+ {RawElixir, RawErlang} = rescue_each_var(Meta, Var, Guards),
+ {Elixir, Erlang, Safe} = rescue_each_ref(Meta, Var, Guards, RawElixir, RawErlang, RawErlang == [], S),
Final = case Elixir == [] of
true -> Erlang;
false ->
- IsTuple = { erl(Meta, is_tuple), Meta, [Var] },
- IsException = { erl(Meta, '=='), Meta, [
- { erl(Meta, element), Meta, [2, Var] }, '__exception__'
- ] },
+ IsTuple = {erl(Meta, is_tuple), Meta, [Var]},
+ IsException = {erl(Meta, '=='), Meta, [
+ {erl(Meta, element), Meta, [2, Var]}, '__exception__'
+ ]},
OrElse = join(Meta, fun erl_or/3, Elixir),
[join(Meta, fun erl_and/3, [IsTuple, IsException, OrElse])|Erlang]
end,
{
- [{ 'when', Meta, [error, Var, join_when(Meta, Final)] }],
+ [{'when', Meta, [error, Var, join_when(Meta, Final)]}],
Safe
- }.
+ }.
%% Handle variables in the right side of rescue.
@@ -85,16 +85,16 @@ rescue_each_var(Meta, ClauseVar, Guards) ->
Vars = [Var || Var <- Guards, is_var(Var)],
case Vars == [] of
- true -> { [], [] };
+ true -> {[], []};
false ->
Elixir = [erl_exception_compare(Meta, ClauseVar, Var) || Var <- Vars],
Erlang = lists:map(fun(Rescue) ->
- Compares = [{ erl(Meta, '=='), Meta, [Rescue, Var] } || Var <- Vars],
+ Compares = [{erl(Meta, '=='), Meta, [Rescue, Var]} || Var <- Vars],
erl_and(Meta,
erl_rescue_guard_for(Meta, ClauseVar, Rescue),
join(Meta, fun erl_or/3, Compares))
end, erlang_rescues()),
- { Elixir, Erlang }
+ {Elixir, Erlang}
end.
%% Rescue each atom name considering their Erlang or Elixir matches.
@@ -102,7 +102,7 @@ rescue_each_var(Meta, ClauseVar, Guards) ->
%% method for optimization.
%% Ignore variables
-rescue_each_ref(Meta, Var, [{ Name, _, Atom }|T], Elixir, Erlang, Safe, S) when is_atom(Name), is_atom(Atom) ->
+rescue_each_ref(Meta, Var, [{Name, _, Atom}|T], Elixir, Erlang, Safe, S) when is_atom(Name), is_atom(Atom) ->
rescue_each_ref(Meta, Var, T, Elixir, Erlang, Safe, S);
rescue_each_ref(Meta, Var, [H|T], Elixir, Erlang, _Safe, S) when
@@ -122,14 +122,14 @@ rescue_each_ref(Meta, Var, [H|T], Elixir, Erlang, Safe, S) when is_atom(H) ->
rescue_each_ref(Meta, Var, [H|T], Elixir, Erlang, Safe, S) ->
case elixir_translator:translate(H, S) of
- { { atom, _, Atom }, _ } ->
+ {{atom, _, Atom}, _} ->
rescue_each_ref(Meta, Var, [Atom|T], Elixir, Erlang, Safe, S);
_ ->
rescue_each_ref(Meta, Var, T, [erl_exception_compare(Meta, Var, H)|Elixir], Erlang, Safe, S)
end;
rescue_each_ref(_, _, [], Elixir, Erlang, Safe, _) ->
- { Elixir, Erlang, Safe }.
+ {Elixir, Erlang, Safe}.
%% Handle erlang rescue matches.
@@ -145,16 +145,16 @@ erl_rescue_guard_for(Meta, Var, List) when is_list(List) ->
join(Meta, fun erl_or/3, [erl_rescue_guard_for(Meta, Var, X) || X <- List]);
erl_rescue_guard_for(Meta, Var, 'Elixir.UndefinedFunctionError') ->
- { erl(Meta, '=='), Meta, [Var, undef] };
+ {erl(Meta, '=='), Meta, [Var, undef]};
erl_rescue_guard_for(Meta, Var, 'Elixir.FunctionClauseError') ->
- { erl(Meta, '=='), Meta, [Var, function_clause] };
+ {erl(Meta, '=='), Meta, [Var, function_clause]};
erl_rescue_guard_for(Meta, Var, 'Elixir.SystemLimitError') ->
- { erl(Meta, '=='), Meta, [Var, system_limit] };
+ {erl(Meta, '=='), Meta, [Var, system_limit]};
erl_rescue_guard_for(Meta, Var, 'Elixir.ArithmeticError') ->
- { erl(Meta, '=='), Meta, [Var, badarith] };
+ {erl(Meta, '=='), Meta, [Var, badarith]};
erl_rescue_guard_for(Meta, Var, 'Elixir.BadArityError') ->
erl_and(Meta,
@@ -188,50 +188,50 @@ erl_rescue_guard_for(Meta, Var, 'Elixir.BadStructError') ->
erl_rescue_guard_for(Meta, Var, 'Elixir.ArgumentError') ->
erl_or(Meta,
- { erl(Meta, '=='), Meta, [Var, badarg] },
+ {erl(Meta, '=='), Meta, [Var, badarg]},
erl_and(Meta,
erl_tuple_size(Meta, Var, 2),
erl_exception_compare(Meta, Var, badarg)));
erl_rescue_guard_for(Meta, Var, 'Elixir.ErlangError') ->
- IsNotTuple = { erl(Meta, 'not'), Meta, [{ erl(Meta, is_tuple), Meta, [Var] }] },
- IsException = { erl(Meta, '/='), Meta, [
- { erl(Meta, element), Meta, [2, Var] }, '__exception__'
- ] },
+ IsNotTuple = {erl(Meta, 'not'), Meta, [{erl(Meta, is_tuple), Meta, [Var]}]},
+ IsException = {erl(Meta, '/='), Meta, [
+ {erl(Meta, element), Meta, [2, Var]}, '__exception__'
+ ]},
erl_or(Meta, IsNotTuple, IsException).
%% Helpers
-format_error({ rescue_no_match, Var, Alias }) ->
+format_error({rescue_no_match, Var, Alias}) ->
VarBinary = 'Elixir.Macro':to_string(Var),
AliasBinary = 'Elixir.Macro':to_string(Alias),
Message = "rescue clause (~ts = ~ts) can never match, maybe you meant to write: ~ts in [~ts] ?",
io_lib:format(Message, [AliasBinary, VarBinary, VarBinary, AliasBinary]).
-is_var({ Name, _, Atom }) when is_atom(Name), is_atom(Atom) -> true;
+is_var({Name, _, Atom}) when is_atom(Name), is_atom(Atom) -> true;
is_var(_) -> false.
erl_tuple_size(Meta, Var, Size) ->
- { erl(Meta, '=='), Meta, [{ erl(Meta, tuple_size), Meta, [Var] }, Size] }.
+ {erl(Meta, '=='), Meta, [{erl(Meta, tuple_size), Meta, [Var]}, Size]}.
erl_exception_compare(Meta, Var, Expr) ->
- { erl(Meta, '=='), Meta, [
- { erl(Meta, element), Meta, [1, Var] },
+ {erl(Meta, '=='), Meta, [
+ {erl(Meta, element), Meta, [1, Var]},
Expr
- ] }.
+ ]}.
join(Meta, Kind, [H|T]) ->
lists:foldl(fun(X, Acc) -> Kind(Meta, Acc, X) end, H, T).
join_when(Meta, [H|T]) ->
- lists:foldl(fun(X, Acc) -> { 'when', Meta, [X, Acc] } end, H, T).
+ lists:foldl(fun(X, Acc) -> {'when', Meta, [X, Acc]} end, H, T).
-prepend_to_block(_Meta, Expr, { '__block__', Meta, Args }) ->
- { '__block__', Meta, [Expr|Args] };
+prepend_to_block(_Meta, Expr, {'__block__', Meta, Args}) ->
+ {'__block__', Meta, [Expr|Args]};
prepend_to_block(Meta, Expr, Args) ->
- { '__block__', Meta, [Expr, Args] }.
+ {'__block__', Meta, [Expr, Args]}.
-erl(Meta, Op) -> { '.', Meta, [erlang, Op] }.
-erl_or(Meta, Left, Right) -> { '__op__', Meta, ['orelse', Left, Right] }.
-erl_and(Meta, Left, Right) -> { '__op__', Meta, ['andalso', Left, Right] }.
+erl(Meta, Op) -> {'.', Meta, [erlang, Op]}.
+erl_or(Meta, Left, Right) -> {'__op__', Meta, ['orelse', Left, Right]}.
+erl_and(Meta, Left, Right) -> {'__op__', Meta, ['andalso', Left, Right]}.
diff --git a/lib/elixir/src/elixir_utils.erl b/lib/elixir/src/elixir_utils.erl
index 8594376b0..1b5d51221 100644
--- a/lib/elixir/src/elixir_utils.erl
+++ b/lib/elixir/src/elixir_utils.erl
@@ -10,13 +10,13 @@
get_line(Opts) when is_list(Opts) ->
case lists:keyfind(line, 1, Opts) of
- { line, Line } when is_integer(Line) -> Line;
+ {line, Line} when is_integer(Line) -> Line;
false -> 0
end.
-split_last([]) -> { [], [] };
+split_last([]) -> {[], []};
split_last(List) -> split_last(List, []).
-split_last([H], Acc) -> { lists:reverse(Acc), H };
+split_last([H], Acc) -> {lists:reverse(Acc), H};
split_last([H|T], Acc) -> split_last(T, [H|Acc]).
file_type(File) ->
@@ -24,8 +24,8 @@ file_type(File) ->
file_type(File, Op) ->
case file:Op(File) of
- { ok, #file_info{type=Type} } -> { ok, Type };
- { error, _ } = Error -> Error
+ {ok, #file_info{type=Type}} -> {ok, Type};
+ {error, _} = Error -> Error
end.
relative_to_cwd(Path) ->
@@ -54,45 +54,45 @@ characters_to_binary(Data) ->
%% that's why things like maps and references are not in the list.
elixir_to_erl(Tree) when is_tuple(Tree) ->
- { tuple, 0, [elixir_to_erl(X) || X <- tuple_to_list(Tree)] };
+ {tuple, 0, [elixir_to_erl(X) || X <- tuple_to_list(Tree)]};
elixir_to_erl([]) ->
- { nil, 0 };
+ {nil, 0};
elixir_to_erl(<<>>) ->
- { bin, 0, [] };
+ {bin, 0, []};
elixir_to_erl(Tree) when is_list(Tree) ->
elixir_to_erl_cons_1(Tree, []);
elixir_to_erl(Tree) when is_atom(Tree) ->
- { atom, 0, Tree };
+ {atom, 0, Tree};
elixir_to_erl(Tree) when is_integer(Tree) ->
- { integer, 0, Tree };
+ {integer, 0, Tree};
elixir_to_erl(Tree) when is_float(Tree) ->
- { float, 0, Tree };
+ {float, 0, Tree};
elixir_to_erl(Tree) when is_binary(Tree) ->
%% Note that our binaries are utf-8 encoded and we are converting
%% to a list using binary_to_list. The reason for this is that Erlang
%% considers a string in a binary to be encoded in latin1, so the bytes
%% are not changed in any fashion.
- { bin, 0, [{ bin_element, 0, { string, 0, binary_to_list(Tree) }, default, default }] };
+ {bin, 0, [{bin_element, 0, {string, 0, binary_to_list(Tree)}, default, default}]};
elixir_to_erl(Function) when is_function(Function) ->
- case (erlang:fun_info(Function, type) == { type, external }) andalso
- (erlang:fun_info(Function, env) == { env, [] }) of
+ case (erlang:fun_info(Function, type) == {type, external}) andalso
+ (erlang:fun_info(Function, env) == {env, []}) of
true ->
- { module, Module } = erlang:fun_info(Function, module),
- { name, Name } = erlang:fun_info(Function, name),
- { arity, Arity } = erlang:fun_info(Function, arity),
-
- { 'fun', 0, { function,
- { atom, 0, Module },
- { atom, 0, Name },
- { integer, 0, Arity } } };
+ {module, Module} = erlang:fun_info(Function, module),
+ {name, Name} = erlang:fun_info(Function, name),
+ {arity, Arity} = erlang:fun_info(Function, arity),
+
+ {'fun', 0, {function,
+ {atom, 0, Module},
+ {atom, 0, Name},
+ {integer, 0, Arity}}};
false ->
error(badarg)
end;
@@ -107,37 +107,37 @@ elixir_to_erl_cons_1([H|T], Acc) -> elixir_to_erl_cons_1(T, [H|Acc]);
elixir_to_erl_cons_1(Other, Acc) -> elixir_to_erl_cons_2(Acc, elixir_to_erl(Other)).
elixir_to_erl_cons_2([H|T], Acc) ->
- elixir_to_erl_cons_2(T, { cons, 0, elixir_to_erl(H), Acc });
+ elixir_to_erl_cons_2(T, {cons, 0, elixir_to_erl(H), Acc});
elixir_to_erl_cons_2([], Acc) ->
Acc.
%% Boolean checks
-returns_boolean({ op, _, Op, _ }) when Op == 'not' -> true;
+returns_boolean({op, _, Op, _}) when Op == 'not' -> true;
-returns_boolean({ op, _, Op, _, _ }) when
+returns_boolean({op, _, Op, _, _}) when
Op == 'and'; Op == 'or'; Op == 'xor';
Op == '=='; Op == '/='; Op == '=<'; Op == '>=';
Op == '<'; Op == '>'; Op == '=:='; Op == '=/=' -> true;
-returns_boolean({ op, _, Op, _, Right }) when Op == 'andalso'; Op == 'orelse' ->
+returns_boolean({op, _, Op, _, Right}) when Op == 'andalso'; Op == 'orelse' ->
returns_boolean(Right);
-returns_boolean({ call, _, { remote, _, { atom, _, erlang }, { atom, _, Fun } }, [_] }) when
+returns_boolean({call, _, {remote, _, {atom, _, erlang}, {atom, _, Fun}}, [_]}) when
Fun == is_atom; Fun == is_binary; Fun == is_bitstring; Fun == is_boolean;
Fun == is_float; Fun == is_function; Fun == is_integer; Fun == is_list;
Fun == is_number; Fun == is_pid; Fun == is_port; Fun == is_reference;
Fun == is_tuple -> true;
-returns_boolean({ call, _, { remote, _, { atom, _, erlang }, { atom, _, Fun } }, [_,_] }) when
+returns_boolean({call, _, {remote, _, {atom, _, erlang}, {atom, _, Fun}}, [_,_]}) when
Fun == is_function -> true;
-returns_boolean({ call, _, { remote, _, { atom, _, erlang }, { atom, _, Fun } }, [_,_,_] }) when
+returns_boolean({call, _, {remote, _, {atom, _, erlang}, {atom, _, Fun}}, [_,_,_]}) when
Fun == function_exported -> true;
-returns_boolean({ atom, _, Bool }) when is_boolean(Bool) -> true;
+returns_boolean({atom, _, Bool}) when is_boolean(Bool) -> true;
-returns_boolean({ 'case', _, _, Clauses }) ->
+returns_boolean({'case', _, _, Clauses}) ->
lists:all(fun
({clause,_,_,_,[Expr]}) -> returns_boolean(Expr);
(_) -> false
@@ -146,9 +146,9 @@ returns_boolean({ 'case', _, _, Clauses }) ->
returns_boolean(_) -> false.
convert_to_boolean(Line, Expr, Bool, S) when is_integer(Line) ->
- case { returns_boolean(Expr), Bool } of
- { true, true } -> { Expr, S };
- { true, false } -> { { op, Line, 'not', Expr }, S };
+ case {returns_boolean(Expr), Bool} of
+ {true, true} -> {Expr, S};
+ {true, false} -> {{op, Line, 'not', Expr}, S};
_ -> do_convert_to_boolean(Line, Expr, Bool, S)
end.
@@ -156,20 +156,20 @@ convert_to_boolean(Line, Expr, Bool, S) when is_integer(Line) ->
%% and false checks in the same clause since
%% it makes dialyzer happy.
do_convert_to_boolean(Line, Expr, Bool, S) ->
- { Name, _, TS } = elixir_scope:build_var('_', S),
- Var = { var, Line, Name },
- Any = { var, Line, '_' },
+ {Name, _, TS} = elixir_scope:build_var('_', S),
+ Var = {var, Line, Name},
+ Any = {var, Line, '_'},
OrElse = do_guarded_convert_to_boolean(Line, Var, 'orelse', '=='),
- FalseResult = { atom,Line,not Bool },
- TrueResult = { atom,Line,Bool },
+ FalseResult = {atom,Line,not Bool},
+ TrueResult = {atom,Line,Bool},
- { { 'case', Line, Expr, [
- { clause, Line, [Var], [[OrElse]], [FalseResult] },
- { clause, Line, [Any], [], [TrueResult] }
- ] }, TS }.
+ {{'case', Line, Expr, [
+ {clause, Line, [Var], [[OrElse]], [FalseResult]},
+ {clause, Line, [Any], [], [TrueResult]}
+ ]}, TS}.
do_guarded_convert_to_boolean(Line, Expr, Op, Comp) ->
- Left = { op, Line, Comp, Expr, { atom, Line, false } },
- Right = { op, Line, Comp, Expr, { atom, Line, nil } },
- { op, Line, Op, Left, Right }. \ No newline at end of file
+ Left = {op, Line, Comp, Expr, {atom, Line, false}},
+ Right = {op, Line, Comp, Expr, {atom, Line, nil}},
+ {op, Line, Op, Left, Right}. \ No newline at end of file
diff --git a/lib/elixir/test/elixir/access_test.exs b/lib/elixir/test/elixir/access_test.exs
index fb147970e..854616353 100644
--- a/lib/elixir/test/elixir/access_test.exs
+++ b/lib/elixir/test/elixir/access_test.exs
@@ -28,10 +28,10 @@ defmodule AccessTest do
end
test :map do
- assert %{ foo: :bar }[:foo] == :bar
- assert %{ 1 => 1 }[1] == 1
- assert %{ 1.0 => 1.0 }[1.0] == 1.0
- assert %{ 1 => 1 }[1.0] == nil
+ assert %{foo: :bar}[:foo] == :bar
+ assert %{1 => 1}[1] == 1
+ assert %{1.0 => 1.0}[1.0] == 1.0
+ assert %{1 => 1}[1.0] == nil
end
test :atom do
diff --git a/lib/elixir/test/elixir/base_test.exs b/lib/elixir/test/elixir/base_test.exs
index da79251ab..3f186bcae 100644
--- a/lib/elixir/test/elixir/base_test.exs
+++ b/lib/elixir/test/elixir/base_test.exs
@@ -16,14 +16,14 @@ defmodule BaseTest do
end
test "decode16" do
- assert { :ok, "" } == decode16("")
- assert { :ok, "f" } == decode16("66")
- assert { :ok, "fo" } == decode16("666F")
- assert { :ok, "foo" } == decode16("666F6F")
- assert { :ok, "foob" } == decode16("666F6F62")
- assert { :ok, "fooba" } == decode16("666F6F6261")
- assert { :ok, "foobar" } == decode16("666F6F626172")
- assert { :ok, <<161, 178, 195, 212, 229, 246, 120, 145>> } == decode16("A1B2C3D4E5F67891")
+ assert {:ok, ""} == decode16("")
+ assert {:ok, "f"} == decode16("66")
+ assert {:ok, "fo"} == decode16("666F")
+ assert {:ok, "foo"} == decode16("666F6F")
+ assert {:ok, "foob"} == decode16("666F6F62")
+ assert {:ok, "fooba"} == decode16("666F6F6261")
+ assert {:ok, "foobar"} == decode16("666F6F626172")
+ assert {:ok, <<161, 178, 195, 212, 229, 246, 120, 145>>} == decode16("A1B2C3D4E5F67891")
end
test "decode16!" do
@@ -85,7 +85,7 @@ defmodule BaseTest do
end
test "decode64 empty" do
- assert { :ok, "" } == decode64("")
+ assert {:ok, ""} == decode64("")
end
test "decode64! empty" do
@@ -93,7 +93,7 @@ defmodule BaseTest do
end
test "decode64 two pads" do
- assert { :ok, "Aladdin:open sesame" } == decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
+ assert {:ok, "Aladdin:open sesame"} == decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
end
test "decode64! two pads" do
@@ -101,7 +101,7 @@ defmodule BaseTest do
end
test "decode64 one pad" do
- assert { :ok, "Hello World" } == decode64("SGVsbG8gV29ybGQ=")
+ assert {:ok, "Hello World"} == decode64("SGVsbG8gV29ybGQ=")
end
test "decode64! one pad" do
@@ -109,7 +109,7 @@ defmodule BaseTest do
end
test "decode64 no pad" do
- assert { :ok, "Aladdin:open sesam" } == decode64("QWxhZGRpbjpvcGVuIHNlc2Ft")
+ assert {:ok, "Aladdin:open sesam"} == decode64("QWxhZGRpbjpvcGVuIHNlc2Ft")
end
test "decode64! no pad" do
@@ -159,7 +159,7 @@ defmodule BaseTest do
end
test "url_decode64 empty" do
- assert { :ok, "" } == url_decode64("")
+ assert {:ok, ""} == url_decode64("")
end
test "url_decode64! empty" do
@@ -167,7 +167,7 @@ defmodule BaseTest do
end
test "url_decode64 two pads" do
- assert { :ok, "Aladdin:open sesame" } == url_decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
+ assert {:ok, "Aladdin:open sesame"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
end
test "url_decode64! two pads" do
@@ -175,7 +175,7 @@ defmodule BaseTest do
end
test "url_decode64 one pad" do
- assert { :ok, "Hello World" } == url_decode64("SGVsbG8gV29ybGQ=")
+ assert {:ok, "Hello World"} == url_decode64("SGVsbG8gV29ybGQ=")
end
test "url_decode64! one pad" do
@@ -183,7 +183,7 @@ defmodule BaseTest do
end
test "url_decode64 no pad" do
- assert { :ok, "Aladdin:open sesam" } == url_decode64("QWxhZGRpbjpvcGVuIHNlc2Ft")
+ assert {:ok, "Aladdin:open sesam"} == url_decode64("QWxhZGRpbjpvcGVuIHNlc2Ft")
end
test "url_decode64! no pad" do
@@ -236,7 +236,7 @@ defmodule BaseTest do
end
test "decode32 empty" do
- assert { :ok, "" } == decode32("")
+ assert {:ok, ""} == decode32("")
end
test "decode32! empty" do
@@ -244,7 +244,7 @@ defmodule BaseTest do
end
test "decode32 one pad" do
- assert { :ok, "foob" } == decode32("MZXW6YQ=")
+ assert {:ok, "foob"} == decode32("MZXW6YQ=")
end
test "decode32! one pad" do
@@ -252,7 +252,7 @@ defmodule BaseTest do
end
test "decode32 three pads" do
- assert { :ok, "foo" } == decode32("MZXW6===")
+ assert {:ok, "foo"} == decode32("MZXW6===")
end
test "decode32! three pads" do
@@ -260,7 +260,7 @@ defmodule BaseTest do
end
test "decode32 four pads" do
- assert { :ok, "fo" } == decode32("MZXQ====")
+ assert {:ok, "fo"} == decode32("MZXQ====")
end
test "decode32! four pads" do
@@ -268,8 +268,8 @@ defmodule BaseTest do
end
test "decode32 six pads" do
- assert { :ok, "foobar" } == decode32("MZXW6YTBOI======")
- assert { :ok, "f" } == decode32("MY======")
+ assert {:ok, "foobar"} == decode32("MZXW6YTBOI======")
+ assert {:ok, "f"} == decode32("MY======")
end
test "decode32! six pads" do
@@ -278,7 +278,7 @@ defmodule BaseTest do
end
test "decode32 no pads" do
- assert { :ok, "fooba" } == decode32("MZXW6YTB")
+ assert {:ok, "fooba"} == decode32("MZXW6YTB")
end
test "decode32! no pads" do
@@ -331,7 +331,7 @@ defmodule BaseTest do
end
test "hex_decode32 empty" do
- assert { :ok, "" } == hex_decode32("")
+ assert {:ok, ""} == hex_decode32("")
end
test "hex_decode32! empty" do
@@ -339,7 +339,7 @@ defmodule BaseTest do
end
test "hex_decode32 one pad" do
- assert { :ok, "foob" } == hex_decode32("CPNMUOG=")
+ assert {:ok, "foob"} == hex_decode32("CPNMUOG=")
end
test "hex_decode32! one pad" do
@@ -347,7 +347,7 @@ defmodule BaseTest do
end
test "hex_decode32 three pads" do
- assert { :ok, "foo" } == hex_decode32("CPNMU===")
+ assert {:ok, "foo"} == hex_decode32("CPNMU===")
end
test "hex_decode32! three pads" do
@@ -355,7 +355,7 @@ defmodule BaseTest do
end
test "hex_decode32 four pads" do
- assert { :ok, "fo" } == hex_decode32("CPNG====")
+ assert {:ok, "fo"} == hex_decode32("CPNG====")
end
test "hex_decode32! four pads" do
@@ -363,8 +363,8 @@ defmodule BaseTest do
end
test "hex_decode32 six pads" do
- assert { :ok, "foobar" } == hex_decode32("CPNMUOJ1E8======")
- assert { :ok, "f" } == hex_decode32("CO======")
+ assert {:ok, "foobar"} == hex_decode32("CPNMUOJ1E8======")
+ assert {:ok, "f"} == hex_decode32("CO======")
end
test "hex_decode32! six pads" do
@@ -373,7 +373,7 @@ defmodule BaseTest do
end
test "hex_decode32 no pads" do
- assert { :ok, "fooba" } == hex_decode32("CPNMUOJ1")
+ assert {:ok, "fooba"} == hex_decode32("CPNMUOJ1")
end
test "hex_decode32! no pads" do
diff --git a/lib/elixir/test/elixir/behaviour_test.exs b/lib/elixir/test/elixir/behaviour_test.exs
index ccbf25525..6979ef95e 100644
--- a/lib/elixir/test/elixir/behaviour_test.exs
+++ b/lib/elixir/test/elixir/behaviour_test.exs
@@ -19,7 +19,7 @@ defmodule BehaviourTest do
defcallback orr(atom | integer) :: atom
- defcallback literal(123, { atom }, :atom, [integer], true) :: atom
+ defcallback literal(123, {atom}, :atom, [integer], true) :: atom
@doc "I should be last."
defmacrocallback last(integer) :: Macro.t
diff --git a/lib/elixir/test/elixir/char_list_test.exs b/lib/elixir/test/elixir/char_list_test.exs
index 3cba889b9..7e074c54c 100644
--- a/lib/elixir/test/elixir/char_list_test.exs
+++ b/lib/elixir/test/elixir/char_list_test.exs
@@ -44,11 +44,11 @@ bar '''
end
test :from_char_data do
- assert List.from_char_data("æß") == { :ok, [?æ, ?ß] }
- assert List.from_char_data("abc") == { :ok, [?a, ?b, ?c] }
+ assert List.from_char_data("æß") == {:ok, [?æ, ?ß]}
+ assert List.from_char_data("abc") == {:ok, [?a, ?b, ?c]}
- assert List.from_char_data(<< 0xDF, 0xFF >>) == { :error, [], << 223, 255 >> }
- assert List.from_char_data(<< 106, 111, 115, 195 >>) == { :incomplete, 'jos', << 195 >> }
+ assert List.from_char_data(<< 0xDF, 0xFF >>) == {:error, [], << 223, 255 >>}
+ assert List.from_char_data(<< 106, 111, 115, 195 >>) == {:incomplete, 'jos', << 195 >>}
end
test :from_char_data! do
diff --git a/lib/elixir/test/elixir/code_test.exs b/lib/elixir/test/elixir/code_test.exs
index 29631909a..bb88ed795 100644
--- a/lib/elixir/test/elixir/code_test.exs
+++ b/lib/elixir/test/elixir/code_test.exs
@@ -13,54 +13,54 @@ defmodule CodeTest do
contents = quote do
defmodule CodeTest.Sample do
- def eval_quoted_info, do: { __MODULE__, __ENV__.file, __ENV__.line }
+ def eval_quoted_info, do: {__MODULE__, __ENV__.file, __ENV__.line}
end
end
Code.eval_quoted contents, [], file: "sample.ex", line: 13
test :eval_string do
- assert Code.eval_string("1 + 2") == { 3, [] }
- assert { 3, _ } = Code.eval_string("a + b", [a: 1, b: 2], __ENV__.location)
+ assert Code.eval_string("1 + 2") == {3, []}
+ assert {3, _} = Code.eval_string("a + b", [a: 1, b: 2], __ENV__.location)
end
test :eval_string_with_other_context do
- assert Code.eval_string("var!(a, Sample) = 1") == { 1, [{{:a,Sample},1}] }
+ assert Code.eval_string("var!(a, Sample) = 1") == {1, [{{:a,Sample},1}]}
end
test :eval_with_unnamed_scopes do
- assert { RuntimeError[], [a: RuntimeError[]] } =
+ assert {RuntimeError[], [a: RuntimeError[]]} =
Code.eval_string("a = (try do (raise \"hello\") rescue e -> e end)")
end
test :eval_with_scope do
- assert Code.eval_string("one", [], delegate_locals_to: __MODULE__) == { 1, [] }
+ assert Code.eval_string("one", [], delegate_locals_to: __MODULE__) == {1, []}
end
test :eval_options do
assert Code.eval_string("is_atom(:foo) and is_record(1..2, Range) and K.is_list([])", [],
- functions: [{ Kernel, [is_atom: 1] }],
- macros: [{ Kernel, [..: 2, and: 2, is_record: 2]}],
+ functions: [{Kernel, [is_atom: 1]}],
+ macros: [{Kernel, [..: 2, and: 2, is_record: 2]}],
aliases: [{K, Kernel}],
- requires: [Kernel]) == { true, [] }
+ requires: [Kernel]) == {true, []}
end
test :eval_with_requires do
- assert Code.eval_string("Kernel.if true, do: :ok", [], requires: [Z, Kernel]) == { :ok, [] }
+ assert Code.eval_string("Kernel.if true, do: :ok", [], requires: [Z, Kernel]) == {:ok, []}
end
test :eval_quoted do
- assert Code.eval_quoted(quote(do: 1 + 2)) == { 3, [] }
- assert CodeTest.Sample.eval_quoted_info() == { CodeTest.Sample, "sample.ex", 13 }
+ assert Code.eval_quoted(quote(do: 1 + 2)) == {3, []}
+ assert CodeTest.Sample.eval_quoted_info() == {CodeTest.Sample, "sample.ex", 13}
end
test :eval_quoted_with_env do
alias :lists, as: MyList
- assert Code.eval_quoted(quote(do: MyList.flatten [[1, 2, 3]]), [], __ENV__) == { [1, 2, 3],[] }
+ assert Code.eval_quoted(quote(do: MyList.flatten [[1, 2, 3]]), [], __ENV__) == {[1, 2, 3],[]}
end
test :eval_file do
- assert Code.eval_file(fixture_path("code_sample.exs")) == { 3, [var: 3] }
+ assert Code.eval_file(fixture_path("code_sample.exs")) == {3, [var: 3]}
end
test :require do
@@ -74,11 +74,11 @@ defmodule CodeTest do
end
test :string_to_quoted do
- assert Code.string_to_quoted("1 + 2") == { :ok, { :+, [line: 1], [1, 2] } }
- assert Code.string_to_quoted!("1 + 2") == { :+, [line: 1], [1, 2] }
+ assert Code.string_to_quoted("1 + 2") == {:ok, {:+, [line: 1], [1, 2]}}
+ assert Code.string_to_quoted!("1 + 2") == {:+, [line: 1], [1, 2]}
assert Code.string_to_quoted("a.1") ==
- { :error, { 1, "syntax error before: ", "1" } }
+ {:error, {1, "syntax error before: ", "1"}}
assert_raise SyntaxError, fn ->
Code.string_to_quoted!("a.1")
@@ -90,7 +90,7 @@ defmodule CodeTest do
end
test :string_to_quoted! do
- assert Code.string_to_quoted!("1 + 2") == { :+, [line: 1], [1, 2] }
+ assert Code.string_to_quoted!("1 + 2") == {:+, [line: 1], [1, 2]}
assert_raise SyntaxError, fn ->
Code.string_to_quoted!("a.1")
@@ -111,21 +111,21 @@ defmodule CodeTest do
end
test :compile_string_works_accross_lexical_scopes do
- assert [{ CompileCrossSample, _ }] = Code.compile_string("CodeTest.genmodule CompileCrossSample")
+ assert [{CompileCrossSample, _}] = Code.compile_string("CodeTest.genmodule CompileCrossSample")
after
:code.purge CompileCrossSample
:code.delete CompileCrossSample
end
test :compile_string do
- assert [{ CompileStringSample, _ }] = Code.compile_string("defmodule CompileStringSample, do: :ok")
+ assert [{CompileStringSample, _}] = Code.compile_string("defmodule CompileStringSample, do: :ok")
after
:code.purge CompileSimpleSample
:code.delete CompileSimpleSample
end
test :compile_quoted do
- assert [{ CompileQuotedSample, _ }] = Code.compile_string("defmodule CompileQuotedSample, do: :ok")
+ assert [{CompileQuotedSample, _}] = Code.compile_string("defmodule CompileQuotedSample, do: :ok")
after
:code.purge CompileQuotedSample
:code.delete CompileQuotedSample
diff --git a/lib/elixir/test/elixir/deprecated_record/access_test.exs b/lib/elixir/test/elixir/deprecated_record/access_test.exs
index 647259006..1fa4646b6 100644
--- a/lib/elixir/test/elixir/deprecated_record/access_test.exs
+++ b/lib/elixir/test/elixir/deprecated_record/access_test.exs
@@ -10,8 +10,8 @@ defmodule Record.AccessTest do
end
test "access with keywords" do
- assert User[] == { User, 0, nil }
- assert User[age: 1] == { User, 1, nil }
+ assert User[] == {User, 0, nil}
+ assert User[age: 1] == {User, 1, nil}
end
test "access with variable inside match" do
@@ -21,8 +21,8 @@ defmodule Record.AccessTest do
test "access match on record name" do
assert is_user(User.new) == true
- assert is_user({ Access.AtomTest, 1 }) == false
- assert is_user({ User, 1, 2, 3 }) == false
+ assert is_user({Access.AtomTest, 1}) == false
+ assert is_user({User, 1, 2, 3}) == false
end
test "access with field match" do
diff --git a/lib/elixir/test/elixir/deprecated_record/private_test.exs b/lib/elixir/test/elixir/deprecated_record/private_test.exs
index 5a804f51b..92cf847ab 100644
--- a/lib/elixir/test/elixir/deprecated_record/private_test.exs
+++ b/lib/elixir/test/elixir/deprecated_record/private_test.exs
@@ -105,7 +105,7 @@ defmodule Record.PrivateTest do
end
test "defmacros" do
- assert { :_macro, 2, 3 } = Macros.macro()
+ assert {:_macro, 2, 3} = Macros.macro()
end
defrecordp :match, [:name]
diff --git a/lib/elixir/test/elixir/deprecated_record/record_test.exs b/lib/elixir/test/elixir/deprecated_record/record_test.exs
index 9faae2a96..5cff4e038 100644
--- a/lib/elixir/test/elixir/deprecated_record/record_test.exs
+++ b/lib/elixir/test/elixir/deprecated_record/record_test.exs
@@ -110,7 +110,7 @@ defmodule Record.DeprecatedTest do
defmacrop compose_dynamic(opts) do
quote do
- RecordTest.DynamicName[unquote_splicing(opts), { :b, "b" }]
+ RecordTest.DynamicName[unquote_splicing(opts), {:b, "b"}]
end
end
@@ -138,14 +138,14 @@ defmodule Record.DeprecatedTest do
end
test :optimizable do
- assert { :b, 1 } in RecordTest.SomeRecord.__record__(:optimizable)
- assert { :b, 2 } in RecordTest.SomeRecord.__record__(:optimizable)
- assert { :update_b, 2 } in RecordTest.SomeRecord.__record__(:optimizable)
- refute { :update_b, 2 } in RecordTest.DynamicName.__record__(:optimizable)
+ assert {:b, 1} in RecordTest.SomeRecord.__record__(:optimizable)
+ assert {:b, 2} in RecordTest.SomeRecord.__record__(:optimizable)
+ assert {:update_b, 2} in RecordTest.SomeRecord.__record__(:optimizable)
+ refute {:update_b, 2} in RecordTest.DynamicName.__record__(:optimizable)
end
test :result do
- assert { :module, _, _, "result"} = (defrecord WithResult, foo: :bar do
+ assert {:module, _, _, "result"} = (defrecord WithResult, foo: :bar do
"result"
end)
end
@@ -181,7 +181,7 @@ defmodule Record.DeprecatedTest do
end
test :custom_record do
- { :module, _, binary, _ } =
+ {:module, _, binary, _} =
defmodule CustomRecord do
Record.deffunctions [:name, :age], __ENV__
Record.deftypes [:name, :age], [name: :binary, age: :integer], __ENV__
@@ -192,10 +192,10 @@ defmodule Record.DeprecatedTest do
record = CustomRecord.new
assert CustomRecord.__record__(:index, :name) == record.__record__(:index, :name)
- assert Enum.any?(Kernel.Typespec.beam_types(binary), &match?({ :type,{ :t, _, _ } }, &1))
+ assert Enum.any?(Kernel.Typespec.beam_types(binary), &match?({:type,{:t, _, _}}, &1))
end
defp empty_tuple, do: {}
- defp a_tuple, do: { :foo, :bar, :baz }
+ defp a_tuple, do: {:foo, :bar, :baz}
defp a_list, do: [ :foo, :bar, :baz ]
end
diff --git a/lib/elixir/test/elixir/dict_test.exs b/lib/elixir/test/elixir/dict_test.exs
index 36fe5c495..ce19f19a0 100644
--- a/lib/elixir/test/elixir/dict_test.exs
+++ b/lib/elixir/test/elixir/dict_test.exs
@@ -11,7 +11,7 @@ defmodule TestDict do
end
def update(%{__struct__: TestDict, list: list} = map, key, initial, fun) do
- %{ map | list: update(list, key, initial, fun) }
+ %{map | list: update(list, key, initial, fun)}
end
def update([{key, value}|list], key, _initial, fun) do
@@ -277,18 +277,18 @@ defmodule DictTest.Common do
test "split/2 with match" do
dict = int_dict()
- { take, drop } = Dict.split(dict, [1])
+ {take, drop} = Dict.split(dict, [1])
assert take == dict
assert drop == new_dict([])
- { take, drop } = Dict.split(dict, [1.0])
+ {take, drop} = Dict.split(dict, [1.0])
assert take == new_dict([])
assert drop == dict
end
test "split/2 with enum" do
dict = int_dict()
- { take, drop } = Dict.split(dict, 1..3)
+ {take, drop} = Dict.split(dict, 1..3)
assert take == dict
assert drop == new_dict([])
end
@@ -365,10 +365,10 @@ defmodule DictTest.Common do
dict = new_dict()
assert Enum.empty?(new_dict([]))
refute Enum.empty?(dict)
- assert Enum.member?(dict, { "first_key", 1 })
- refute Enum.member?(dict, { "first_key", 2 })
+ assert Enum.member?(dict, {"first_key", 1})
+ refute Enum.member?(dict, {"first_key", 2})
assert Enum.count(dict) == 2
- assert Enum.reduce(dict, 0, fn({ k, v }, acc) -> v + acc end) == 3
+ assert Enum.reduce(dict, 0, fn({k, v}, acc) -> v + acc end) == 3
end
test "is collectable" do
@@ -376,7 +376,7 @@ defmodule DictTest.Common do
assert Dict.size(dict) == 2
assert Enum.sort(dict) == [{"first_key", 1}, {"second_key", 2}]
- dict = new_dict([{1}, {2}, {3}], fn {x} -> { <<x + 64>>, x } end)
+ dict = new_dict([{1}, {2}, {3}], fn {x} -> {<<x + 64>>, x} end)
assert Dict.size(dict) == 3
assert Enum.sort(dict) == [{"A", 1}, {"B", 2}, {"C", 3}]
@@ -388,7 +388,7 @@ defmodule DictTest.Common do
list = Dict.to_list(dict)
assert Enum.zip(list, list) == Enum.zip(dict, dict)
- dict = new_dict(1..120, fn i -> { i, i } end)
+ dict = new_dict(1..120, fn i -> {i, i} end)
list = Dict.to_list(dict)
assert Enum.zip(list, list) == Enum.zip(dict, dict)
end
diff --git a/lib/elixir/test/elixir/enum_test.exs b/lib/elixir/test/elixir/enum_test.exs
index 2d79ff3f8..0dcea2bd7 100644
--- a/lib/elixir/test/elixir/enum_test.exs
+++ b/lib/elixir/test/elixir/enum_test.exs
@@ -136,10 +136,10 @@ defmodule EnumTest.List do
end
test :fetch do
- assert Enum.fetch([2, 4, 6], 0) == { :ok, 2 }
- assert Enum.fetch([2, 4, 6], 2) == { :ok, 6 }
+ assert Enum.fetch([2, 4, 6], 0) == {:ok, 2}
+ assert Enum.fetch([2, 4, 6], 2) == {:ok, 6}
assert Enum.fetch([2, 4, 6], 4) == :error
- assert Enum.fetch([2, 4, 6], -2) == { :ok, 4}
+ assert Enum.fetch([2, 4, 6], -2) == {:ok, 4}
assert Enum.fetch([2, 4, 6], -4) == :error
end
@@ -166,18 +166,18 @@ defmodule EnumTest.List do
end
test :flat_map_reduce do
- assert Enum.flat_map_reduce([1, 2, 3], 0, &{ [&1, &2], &1 + &2 }) ==
- { [1, 0, 2, 1, 3, 3], 6 }
+ assert Enum.flat_map_reduce([1, 2, 3], 0, &{[&1, &2], &1 + &2}) ==
+ {[1, 0, 2, 1, 3, 3], 6}
assert Enum.flat_map_reduce(1..100, 0, fn i, acc ->
- if acc < 3, do: { [i], acc + 1 }, else: { :halt, acc }
- end) == { [1,2,3], 3 }
+ if acc < 3, do: {[i], acc + 1}, else: {:halt, acc}
+ end) == {[1,2,3], 3}
end
test :group_by do
assert Enum.group_by([], fn -> nil end) == %{}
assert Enum.group_by(1..6, &rem(&1, 3)) ==
- %{ 0 => [6, 3], 1 => [4, 1], 2 => [5, 2] }
+ %{0 => [6, 3], 1 => [4, 1], 2 => [5, 2]}
result = Enum.group_by(1..6, %{3 => :default}, &rem(&1, 3))
assert result[0] == [6, 3]
@@ -190,8 +190,8 @@ defmodule EnumTest.List do
assert Enum.into(%{a: 1, b: 2}, []) == [a: 1, b: 2]
assert Enum.into([1, 2, 3], "numbers: ", &to_string/1) == "numbers: 123"
assert Enum.into([1, 2, 3], fn
- func, { :cont, x } when is_function(func) -> [x]
- list, { :cont, x } -> [x|list]
+ func, {:cont, x} when is_function(func) -> [x]
+ list, {:cont, x} -> [x|list]
list, _ -> list
end) == [3, 2, 1]
end
@@ -227,13 +227,13 @@ defmodule EnumTest.List do
end
test :map_reduce do
- assert Enum.map_reduce([], 1, fn(x, acc) -> { x * 2, x + acc } end) == { [], 1 }
- assert Enum.map_reduce([1, 2, 3], 1, fn(x, acc) -> { x * 2, x + acc } end) == { [2, 4, 6], 7 }
+ assert Enum.map_reduce([], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[], 1}
+ assert Enum.map_reduce([1, 2, 3], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7}
end
test :partition do
- assert Enum.partition([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == { [2], [1, 3] }
- assert Enum.partition([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == { [2, 4, 6], [] }
+ assert Enum.partition([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]}
+ assert Enum.partition([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == {[2, 4, 6], []}
end
test :reduce do
@@ -277,25 +277,25 @@ defmodule EnumTest.List do
end
test :split do
- assert Enum.split([1, 2, 3], 0) == { [], [1, 2, 3] }
- assert Enum.split([1, 2, 3], 1) == { [1], [2, 3] }
- assert Enum.split([1, 2, 3], 2) == { [1, 2], [3] }
- assert Enum.split([1, 2, 3], 3) == { [1, 2, 3], [] }
- assert Enum.split([1, 2, 3], 4) == { [1, 2, 3], [] }
- assert Enum.split([], 3) == { [], [] }
- assert Enum.split([1, 2, 3], -1) == { [1, 2], [3] }
- assert Enum.split([1, 2, 3], -2) == { [1], [2, 3] }
- assert Enum.split([1, 2, 3], -3) == { [], [1, 2, 3] }
- assert Enum.split([1, 2, 3], -10) == { [], [1, 2, 3] }
+ assert Enum.split([1, 2, 3], 0) == {[], [1, 2, 3]}
+ assert Enum.split([1, 2, 3], 1) == {[1], [2, 3]}
+ assert Enum.split([1, 2, 3], 2) == {[1, 2], [3]}
+ assert Enum.split([1, 2, 3], 3) == {[1, 2, 3], []}
+ assert Enum.split([1, 2, 3], 4) == {[1, 2, 3], []}
+ assert Enum.split([], 3) == {[], []}
+ assert Enum.split([1, 2, 3], -1) == {[1, 2], [3]}
+ assert Enum.split([1, 2, 3], -2) == {[1], [2, 3]}
+ assert Enum.split([1, 2, 3], -3) == {[], [1, 2, 3]}
+ assert Enum.split([1, 2, 3], -10) == {[], [1, 2, 3]}
end
test :split_while do
- assert Enum.split_while([1, 2, 3], fn(_) -> false end) == { [], [1, 2, 3] }
- assert Enum.split_while([1, 2, 3], fn(_) -> true end) == { [1, 2, 3], [] }
- assert Enum.split_while([1, 2, 3], fn(x) -> x > 2 end) == { [], [1, 2, 3] }
- assert Enum.split_while([1, 2, 3], fn(x) -> x > 3 end) == { [], [1, 2, 3] }
- assert Enum.split_while([1, 2, 3], fn(x) -> x < 3 end) == { [1, 2], [3] }
- assert Enum.split_while([], fn(_) -> true end) == { [], [] }
+ assert Enum.split_while([1, 2, 3], fn(_) -> false end) == {[], [1, 2, 3]}
+ assert Enum.split_while([1, 2, 3], fn(_) -> true end) == {[1, 2, 3], []}
+ assert Enum.split_while([1, 2, 3], fn(x) -> x > 2 end) == {[], [1, 2, 3]}
+ assert Enum.split_while([1, 2, 3], fn(x) -> x > 3 end) == {[], [1, 2, 3]}
+ assert Enum.split_while([1, 2, 3], fn(x) -> x < 3 end) == {[1, 2], [3]}
+ assert Enum.split_while([], fn(_) -> true end) == {[], []}
end
test :sum do
@@ -344,7 +344,7 @@ defmodule EnumTest.List do
test :traverse do
assert Enum.traverse([1, 2, 3], &(&1 * &1)) == [1, 4, 9]
- assert Enum.traverse(%{a: 1, b: 2}, fn { k, v } -> { k, v*2 } end) == %{a: 2, b: 4}
+ assert Enum.traverse(%{a: 1, b: 2}, fn {k, v} -> {k, v*2} end) == %{a: 2, b: 4}
end
test :uniq do
@@ -673,10 +673,10 @@ defmodule EnumTest.Range do
test :map_reduce do
range = 1..0
- assert Enum.map_reduce(range, 1, fn(x, acc) -> { x * 2, x + acc } end) == { [2, 0], 2 }
+ assert Enum.map_reduce(range, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 0], 2}
range = 1..3
- assert Enum.map_reduce(range, 1, fn(x, acc) -> { x * 2, x + acc } end) == { [2, 4, 6], 7 }
+ assert Enum.map_reduce(range, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7}
end
test :max do
@@ -703,7 +703,7 @@ defmodule EnumTest.Range do
test :partition do
range = 1..3
- assert Enum.partition(range, fn(x) -> rem(x, 2) == 0 end) == { [2], [1, 3] }
+ assert Enum.partition(range, fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]}
end
test :reduce do
@@ -795,30 +795,30 @@ defmodule EnumTest.Range do
test :split do
range = 1..3
- assert Enum.split(range, 0) == { [], [1, 2, 3] }
- assert Enum.split(range, 1) == { [1], [2, 3] }
- assert Enum.split(range, 2) == { [1, 2], [3] }
- assert Enum.split(range, 3) == { [1, 2, 3], [] }
- assert Enum.split(range, 4) == { [1, 2, 3], [] }
- assert Enum.split(range, -1) == { [1, 2], [3] }
- assert Enum.split(range, -2) == { [1], [2, 3] }
- assert Enum.split(range, -3) == { [], [1, 2, 3] }
- assert Enum.split(range, -10) == { [], [1, 2, 3] }
+ assert Enum.split(range, 0) == {[], [1, 2, 3]}
+ assert Enum.split(range, 1) == {[1], [2, 3]}
+ assert Enum.split(range, 2) == {[1, 2], [3]}
+ assert Enum.split(range, 3) == {[1, 2, 3], []}
+ assert Enum.split(range, 4) == {[1, 2, 3], []}
+ assert Enum.split(range, -1) == {[1, 2], [3]}
+ assert Enum.split(range, -2) == {[1], [2, 3]}
+ assert Enum.split(range, -3) == {[], [1, 2, 3]}
+ assert Enum.split(range, -10) == {[], [1, 2, 3]}
range = 1..0
- assert Enum.split(range, 3) == { [1, 0], [] }
+ assert Enum.split(range, 3) == {[1, 0], []}
end
test :split_while do
range = 1..3
- assert Enum.split_while(range, fn(_) -> false end) == { [], [1, 2, 3] }
- assert Enum.split_while(range, fn(_) -> true end) == { [1, 2, 3], [] }
- assert Enum.split_while(range, fn(x) -> x > 2 end) == { [], [1, 2, 3] }
- assert Enum.split_while(range, fn(x) -> x > 3 end) == { [], [1, 2, 3] }
- assert Enum.split_while(range, fn(x) -> x < 3 end) == { [1, 2], [3] }
+ assert Enum.split_while(range, fn(_) -> false end) == {[], [1, 2, 3]}
+ assert Enum.split_while(range, fn(_) -> true end) == {[1, 2, 3], []}
+ assert Enum.split_while(range, fn(x) -> x > 2 end) == {[], [1, 2, 3]}
+ assert Enum.split_while(range, fn(x) -> x > 3 end) == {[], [1, 2, 3]}
+ assert Enum.split_while(range, fn(x) -> x < 3 end) == {[1, 2], [3]}
range = 1..0
- assert Enum.split_while(range, fn(_) -> true end) == { [1, 0], [] }
+ assert Enum.split_while(range, fn(_) -> true end) == {[1, 0], []}
end
test :sum do
@@ -886,7 +886,7 @@ defmodule EnumTest.SideEffects do
import PathHelpers
test "take with side effects" do
- stream = Stream.unfold(1, fn x -> IO.puts x; { x, x + 1 } end)
+ stream = Stream.unfold(1, fn x -> IO.puts x; {x, x + 1} end)
assert capture_io(fn ->
Enum.take(stream, 1)
end) == "1\n"
diff --git a/lib/elixir/test/elixir/exception_test.exs b/lib/elixir/test/elixir/exception_test.exs
index e58602d31..f686a9be0 100644
--- a/lib/elixir/test/elixir/exception_test.exs
+++ b/lib/elixir/test/elixir/exception_test.exs
@@ -49,7 +49,7 @@ defmodule Kernel.ExceptionTest do
test "format_stacktrace_entry with fun" do
assert Exception.format_stacktrace_entry({fn(x) -> x end, [1], []}) =~ ~r/#Function<.+>\(1\)/
- assert Exception.format_stacktrace_entry({fn(x, y) -> { x, y } end, 2, []}) =~ ~r"#Function<.+>/2"
+ assert Exception.format_stacktrace_entry({fn(x, y) -> {x, y} end, 2, []}) =~ ~r"#Function<.+>/2"
end
test "format_mfa" do
@@ -113,6 +113,6 @@ defmodule Kernel.ExceptionTest do
end
defp empty_tuple, do: {}
- defp a_tuple, do: { :foo, :bar, :baz }
+ defp a_tuple, do: {:foo, :bar, :baz}
defp a_list, do: [ :foo, :bar, :baz ]
end
diff --git a/lib/elixir/test/elixir/file_test.exs b/lib/elixir/test/elixir/file_test.exs
index 36d782598..2d6c4f015 100644
--- a/lib/elixir/test/elixir/file_test.exs
+++ b/lib/elixir/test/elixir/file_test.exs
@@ -50,7 +50,7 @@ defmodule FileTest do
File.mkdir(dest)
try do
- assert File.cp(src, dest) == { :error, :eisdir }
+ assert File.cp(src, dest) == {:error, :eisdir}
after
File.rm_rf dest
end
@@ -72,7 +72,7 @@ defmodule FileTest do
test :cp_with_src_dir do
src = fixture_path("cp_r")
dest = tmp_path("tmp.file")
- assert File.cp(src, dest) == { :error, :eisdir }
+ assert File.cp(src, dest) == {:error, :eisdir}
end
test :cp_with_conflict do
@@ -141,7 +141,7 @@ defmodule FileTest do
try do
assert File.exists?(dest)
- assert File.cp_r(src, dest) == { :ok, [dest] }
+ assert File.cp_r(src, dest) == {:ok, [dest]}
assert File.exists?(dest)
after
File.rm(dest)
@@ -167,7 +167,7 @@ defmodule FileTest do
try do
refute File.exists?(dest)
- assert File.cp_r(src, dest) == { :ok, [dest] }
+ assert File.cp_r(src, dest) == {:ok, [dest]}
assert File.exists?(dest)
after
File.rm_rf dest
@@ -185,7 +185,7 @@ defmodule FileTest do
refute File.exists?(tmp_path("tmp/a/a/2.txt"))
refute File.exists?(tmp_path("tmp/b/3.txt"))
- { :ok, files } = File.cp_r(src, dest)
+ {:ok, files} = File.cp_r(src, dest)
assert length(files) == 7
assert tmp_path("tmp/a") in files
assert tmp_path("tmp/a/1.txt") in files
@@ -219,7 +219,7 @@ defmodule FileTest do
refute File.exists?(tmp_path("tmp/a/a/2.txt"))
refute File.exists?(tmp_path("tmp/b/3.txt"))
- { :ok, files } = File.cp_r(src, dest)
+ {:ok, files} = File.cp_r(src, dest)
assert length(files) == 7
assert File.exists?(tmp_path("tmp/a/1.txt"))
@@ -233,7 +233,7 @@ defmodule FileTest do
test :cp_r_with_src_unknown do
src = fixture_path("unknown")
dest = tmp_path("tmp")
- assert File.cp_r(src, dest) == { :error, :enoent, src }
+ assert File.cp_r(src, dest) == {:error, :enoent, src}
end
test :cp_r_with_dir_and_file_conflict do
@@ -260,7 +260,7 @@ defmodule FileTest do
refute File.exists?(tmp_path("tmp/a/a/2.txt"))
refute File.exists?(tmp_path("tmp/b/3.txt"))
- { :ok, files } = File.cp_r(src, dest)
+ {:ok, files} = File.cp_r(src, dest)
assert length(files) == 7
assert Enum.all?(files, &is_binary/1)
@@ -379,11 +379,11 @@ defmodule FileTest do
end
test :ls do
- { :ok, value } = File.ls(fixture_path)
+ {:ok, value} = File.ls(fixture_path)
assert "code_sample.exs" in value
assert "file.txt" in value
- { :error, :enoent } = File.ls(fixture_path("non-existent-subdirectory"))
+ {:error, :enoent} = File.ls(fixture_path("non-existent-subdirectory"))
end
test :ls! do
@@ -400,17 +400,17 @@ defmodule FileTest do
use Elixir.FileCase
test :read_with_binary do
- assert { :ok, "FOO\n" } = File.read(fixture_path("file.txt"))
- assert { :error, :enoent } = File.read(fixture_path("missing.txt"))
+ assert {:ok, "FOO\n"} = File.read(fixture_path("file.txt"))
+ assert {:error, :enoent} = File.read(fixture_path("missing.txt"))
end
test :read_with_list do
- assert { :ok, "FOO\n" } = File.read(Path.expand('fixtures/file.txt', __DIR__))
- assert { :error, :enoent } = File.read(Path.expand('fixtures/missing.txt', __DIR__))
+ assert {:ok, "FOO\n"} = File.read(Path.expand('fixtures/file.txt', __DIR__))
+ assert {:error, :enoent} = File.read(Path.expand('fixtures/missing.txt', __DIR__))
end
test :read_with_utf8 do
- assert { :ok, "Русский\n日\n" } = File.read(Path.expand('fixtures/utf8.txt', __DIR__))
+ assert {:ok, "Русский\n日\n"} = File.read(Path.expand('fixtures/utf8.txt', __DIR__))
end
test :read! do
@@ -427,7 +427,7 @@ defmodule FileTest do
try do
refute File.exists?(fixture)
assert File.write(fixture, 'test text') == :ok
- assert File.read(fixture) == { :ok, "test text" }
+ assert File.read(fixture) == {:ok, "test text"}
after
File.rm(fixture)
end
@@ -438,7 +438,7 @@ defmodule FileTest do
try do
refute File.exists?(fixture)
assert File.write(fixture, "Русский\n日\n") == :ok
- assert { :ok, "Русский\n日\n" } == File.read(fixture)
+ assert {:ok, "Русский\n日\n"} == File.read(fixture)
after
File.rm(fixture)
end
@@ -450,32 +450,32 @@ defmodule FileTest do
refute File.exists?(fixture)
assert File.write(fixture, "Русский\n日\n") == :ok
assert File.write(fixture, "test text", [:append]) == :ok
- assert { :ok, "Русский\n日\ntest text" } == File.read(fixture)
+ assert {:ok, "Русский\n日\ntest text"} == File.read(fixture)
after
File.rm(fixture)
end
end
test :open_file_without_modes do
- { :ok, file } = File.open(fixture_path("file.txt"))
+ {:ok, file} = File.open(fixture_path("file.txt"))
assert IO.gets(file, "") == "FOO\n"
assert File.close(file) == :ok
end
test :open_file_with_char_list do
- { :ok, file } = File.open(fixture_path("file.txt"), [:char_list])
+ {:ok, file} = File.open(fixture_path("file.txt"), [:char_list])
assert IO.gets(file, "") == 'FOO\n'
assert File.close(file) == :ok
end
test :open_utf8_by_default do
- { :ok, file } = File.open(fixture_path("utf8.txt"), [:utf8])
+ {:ok, file} = File.open(fixture_path("utf8.txt"), [:utf8])
assert IO.gets(file, "") == "Русский\n"
assert File.close(file) == :ok
end
test :open_readonly_by_default do
- { :ok, file } = File.open(fixture_path("file.txt"))
+ {:ok, file} = File.open(fixture_path("file.txt"))
assert_raise ArgumentError, fn -> IO.write(file, "foo") end
assert File.close(file) == :ok
end
@@ -483,10 +483,10 @@ defmodule FileTest do
test :open_with_write_permission do
fixture = tmp_path("tmp_text.txt")
try do
- { :ok, file } = File.open(fixture, [:write])
+ {:ok, file} = File.open(fixture, [:write])
assert IO.write(file, "foo") == :ok
assert File.close(file) == :ok
- assert File.read(fixture) == { :ok, "foo" }
+ assert File.read(fixture) == {:ok, "foo"}
after
File.rm(fixture)
end
@@ -495,23 +495,23 @@ defmodule FileTest do
test :open_with_binwrite_permission do
fixture = tmp_path("tmp_text.txt")
try do
- { :ok, file } = File.open(fixture, [:write])
+ {:ok, file} = File.open(fixture, [:write])
assert IO.binwrite(file, "Русский") == :ok
assert File.close(file) == :ok
- assert File.read(fixture) == { :ok, "Русский" }
+ assert File.read(fixture) == {:ok, "Русский"}
after
File.rm(fixture)
end
end
test :open_utf8_and_charlist do
- { :ok, file } = File.open(fixture_path("utf8.txt"), [:char_list, :utf8])
+ {:ok, file} = File.open(fixture_path("utf8.txt"), [:char_list, :utf8])
assert IO.gets(file, "") == [1056, 1091, 1089, 1089, 1082, 1080, 1081, 10]
assert File.close(file) == :ok
end
test :open_respects_encoding do
- { :ok, file } = File.open(fixture_path("utf8.txt"), [{:encoding, :latin1}])
+ {:ok, file} = File.open(fixture_path("utf8.txt"), [{:encoding, :latin1}])
assert IO.gets(file, "") == <<195, 144, 194, 160, 195, 145, 194, 131, 195, 145, 194, 129, 195, 145, 194, 129, 195, 144, 194, 186, 195, 144, 194, 184, 195, 144, 194, 185, 10>>
assert File.close(file) == :ok
end
@@ -522,7 +522,7 @@ defmodule FileTest do
test :open_a_file_with_function do
file = fixture_path("file.txt")
- assert File.open(file, &IO.read(&1, :line)) == { :ok, "FOO\n" }
+ assert File.open(file, &IO.read(&1, :line)) == {:ok, "FOO\n"}
end
test :open_a_missing_file! do
@@ -673,7 +673,7 @@ defmodule FileTest do
end
defp io_error?(result) do
- { :error, errorcode } = result
+ {:error, errorcode} = result
errorcode in [:enotdir, :eio, :enoent, :eisdir]
end
end
@@ -690,11 +690,11 @@ defmodule FileTest do
end
test :rm_file_with_dir do
- assert File.rm(fixture_path) == { :error, :eperm }
+ assert File.rm(fixture_path) == {:error, :eperm}
end
test :rm_nonexistent_file do
- assert File.rm('missing.txt') == { :error, :enoent }
+ assert File.rm('missing.txt') == {:error, :enoent}
end
test :rm! do
@@ -747,7 +747,7 @@ defmodule FileTest do
assert File.exists?(tmp_path("tmp/a/a/2.txt"))
assert File.exists?(tmp_path("tmp/b/3.txt"))
- { :ok, files } = File.rm_rf(fixture)
+ {:ok, files} = File.rm_rf(fixture)
assert length(files) == 7
assert fixture in files
assert tmp_path("tmp/a/1.txt") in files
@@ -769,7 +769,7 @@ defmodule FileTest do
if File.exists?(from) or not is_win? do
assert File.exists?(from)
- { :ok, files } = File.rm_rf(from)
+ {:ok, files} = File.rm_rf(from)
assert length(files) == 1
assert File.exists?(Path.join(to, "hello"))
@@ -788,7 +788,7 @@ defmodule FileTest do
assert File.exists?(tmp_path("tmp/a/a/2.txt"))
assert File.exists?(tmp_path("tmp/b/3.txt"))
- { :ok, files } = File.rm_rf(fixture)
+ {:ok, files} = File.rm_rf(fixture)
assert length(files) == 7
assert tmp_path("tmp") in files
assert Enum.all?(files, &is_binary/1)
@@ -802,17 +802,17 @@ defmodule FileTest do
test :rm_rf_with_file do
fixture = tmp_path("tmp")
File.write(fixture, "hello")
- assert File.rm_rf(fixture) == { :ok, [fixture] }
+ assert File.rm_rf(fixture) == {:ok, [fixture]}
end
test :rm_rf_with_unknown do
fixture = tmp_path("tmp.unknown")
- assert File.rm_rf(fixture) == { :ok, [] }
+ assert File.rm_rf(fixture) == {:ok, []}
end
test :rm_rf_with_invalid do
fixture = fixture_path "file.txt/path"
- assert File.rm_rf(fixture) == { :ok, [] }
+ assert File.rm_rf(fixture) == {:ok, []}
end
test :rm_rf! do
@@ -855,7 +855,7 @@ defmodule FileTest do
end
test :stat_with_invalid_file do
- assert { :error, _ } = File.stat("./invalid_file")
+ assert {:error, _} = File.stat("./invalid_file")
end
test :stat_with_invalid_file! do
@@ -873,7 +873,7 @@ defmodule FileTest do
File.open dest, [:write], fn(target) ->
Enum.into stream, IO.stream(target, :line), &String.replace(&1, "O", "A")
end
- assert File.read(dest) == { :ok, "FAA\n" }
+ assert File.read(dest) == {:ok, "FAA\n"}
after
File.rm(dest)
end
@@ -888,7 +888,7 @@ defmodule FileTest do
File.open dest, [:write], fn(target) ->
Enum.into stream, IO.binstream(target, :line), &String.replace(&1, "O", "A")
end
- assert File.read(dest) == { :ok, "FAA\n" }
+ assert File.read(dest) == {:ok, "FAA\n"}
after
File.rm(dest)
end
@@ -905,7 +905,7 @@ defmodule FileTest do
src = fixture_path("file.txt")
stream = File.stream!(src, [:utf8], 10)
assert %File.Stream{} = stream
- assert stream.modes == [{ :encoding, :utf8 }, :binary]
+ assert stream.modes == [{:encoding, :utf8}, :binary]
refute stream.raw
assert stream.line_or_bytes == 10
end
@@ -921,7 +921,7 @@ defmodule FileTest do
IO.write target, String.replace(line, "O", "A")
end
end
- assert File.read(dest) == { :ok, "FAA\n" }
+ assert File.read(dest) == {:ok, "FAA\n"}
after
File.rm(dest)
end
@@ -938,7 +938,7 @@ defmodule FileTest do
IO.write target, String.replace(line, "OO", "AA")
end
end
- assert File.read(dest) == { :ok, "FOO\n" }
+ assert File.read(dest) == {:ok, "FOO\n"}
after
File.rm(dest)
end
@@ -955,7 +955,7 @@ defmodule FileTest do
IO.write target, String.replace(line, "O", "A")
end
end
- assert File.read(dest) == { :ok, "FAA\n" }
+ assert File.read(dest) == {:ok, "FAA\n"}
after
File.rm(dest)
end
@@ -972,7 +972,7 @@ defmodule FileTest do
IO.write target, String.replace(line, "OO", "AA")
end
end
- assert File.read(dest) == { :ok, "FOO\n" }
+ assert File.read(dest) == {:ok, "FOO\n"}
after
File.rm(dest)
end
@@ -991,7 +991,7 @@ defmodule FileTest do
|> Enum.into(original)
assert stream == original
- assert File.read(dest) == { :ok, "FAA\n" }
+ assert File.read(dest) == {:ok, "FAA\n"}
after
File.rm(dest)
end
@@ -1012,7 +1012,7 @@ defmodule FileTest do
File.stream!(src, [:append])
|> Enum.into(original)
- assert File.read(dest) == { :ok, "FAA\nFOO\n" }
+ assert File.read(dest) == {:ok, "FAA\nFOO\n"}
after
File.rm(dest)
end
@@ -1023,8 +1023,8 @@ defmodule FileTest do
dest = tmp_path("tmp_test.txt")
try do
refute File.exists?(dest)
- assert File.copy(src, dest) == { :ok, 4 }
- assert File.read(dest) == { :ok, "FOO\n" }
+ assert File.copy(src, dest) == {:ok, 4}
+ assert File.read(dest) == {:ok, "FOO\n"}
after
File.rm(dest)
end
@@ -1035,8 +1035,8 @@ defmodule FileTest do
dest = tmp_path("tmp_test.txt")
try do
refute File.exists?(dest)
- assert File.copy(src, dest, 2) == { :ok, 2 }
- assert { :ok, "FO" } == File.read(dest)
+ assert File.copy(src, dest, 2) == {:ok, 2}
+ assert {:ok, "FO"} == File.read(dest)
after
File.rm(dest)
end
@@ -1045,7 +1045,7 @@ defmodule FileTest do
test :copy_with_invalid_file do
src = fixture_path("invalid.txt")
dest = tmp_path("tmp_test.txt")
- assert File.copy(src, dest, 2) == { :error, :enoent }
+ assert File.copy(src, dest, 2) == {:error, :enoent}
end
test :copy! do
@@ -1054,7 +1054,7 @@ defmodule FileTest do
try do
refute File.exists?(dest)
assert File.copy!(src, dest) == 4
- assert { :ok, "FOO\n" } == File.read(dest)
+ assert {:ok, "FOO\n"} == File.read(dest)
after
File.rm(dest)
end
@@ -1066,7 +1066,7 @@ defmodule FileTest do
try do
refute File.exists?(dest)
assert File.copy!(src, dest, 2) == 2
- assert { :ok, "FO" } == File.read(dest)
+ assert {:ok, "FO"} == File.read(dest)
after
File.rm(dest)
end
@@ -1081,7 +1081,7 @@ defmodule FileTest do
end
test :cwd_and_cd do
- { :ok, current } = File.cwd
+ {:ok, current} = File.cwd
try do
assert File.cd(fixture_path) == :ok
assert File.exists?("file.txt")
@@ -1122,12 +1122,12 @@ defmodule FileTest do
test :touch_with_no_file do
fixture = tmp_path("tmp_test.txt")
- time = { { 2010, 4, 17 }, { 14, 0, 0 }}
+ time = {{2010, 4, 17}, {14, 0, 0}}
try do
refute File.exists?(fixture)
assert File.touch(fixture, time) == :ok
- assert { :ok, "" } == File.read(fixture)
+ assert {:ok, ""} == File.read(fixture)
assert File.stat!(fixture).mtime == time
after
File.rm(fixture)
@@ -1261,12 +1261,12 @@ defmodule FileTest do
last_year :calendar.local_time
end
- defp last_year({ { year, month, day }, time }) do
- { { year - 1, month, day }, time }
+ defp last_year({{year, month, day}, time}) do
+ {{year - 1, month, day}, time}
end
defp io_error?(result) do
- { :error, errorcode } = result
+ {:error, errorcode} = result
errorcode in [:enotdir, :eio, :enoent, :eisdir]
end
end
diff --git a/lib/elixir/test/elixir/gen_event/behaviour_test.exs b/lib/elixir/test/elixir/gen_event/behaviour_test.exs
index e744bd3ca..b018099d8 100644
--- a/lib/elixir/test/elixir/gen_event/behaviour_test.exs
+++ b/lib/elixir/test/elixir/gen_event/behaviour_test.exs
@@ -9,11 +9,11 @@ defmodule GenEvent.BehaviourTest do
# Callbacks
def init(_) do
- { :ok, [] }
+ {:ok, []}
end
def handle_event({:notification, x}, notifications) do
- { :ok, [x|notifications] }
+ {:ok, [x|notifications]}
end
def handle_call(:notifications, notifications) do
@@ -22,7 +22,7 @@ defmodule GenEvent.BehaviourTest do
end
test "using defines callbacks" do
- { :ok, pid } = :gen_event.start_link
+ {:ok, pid} = :gen_event.start_link
:gen_event.add_handler(pid, MyEventHandler, [])
:gen_event.notify(pid, {:notification, 1})
diff --git a/lib/elixir/test/elixir/gen_server/behaviour_test.exs b/lib/elixir/test/elixir/gen_server/behaviour_test.exs
index 76ff7f44a..7e6806c68 100644
--- a/lib/elixir/test/elixir/gen_server/behaviour_test.exs
+++ b/lib/elixir/test/elixir/gen_server/behaviour_test.exs
@@ -19,19 +19,19 @@ defmodule GenServer.BehaviourTest do
# Callbacks
def handle_call(:pop, _from, [h|t]) do
- { :reply, h, t }
+ {:reply, h, t}
end
def handle_call(:terminate, _from, config) do
- { :stop, :normal, :ok, config }
+ {:stop, :normal, :ok, config}
end
def handle_call(request, from, config) do
super(request, from, config)
end
- def handle_cast({ :push, item }, config) do
- { :noreply, [item|config] }
+ def handle_cast({:push, item}, config) do
+ {:noreply, [item|config]}
end
def handle_cast(request, config) do
@@ -40,28 +40,28 @@ defmodule GenServer.BehaviourTest do
end
test "using defines callbacks" do
- assert { :ok, pid } = :gen_server.start_link(Sample, [:hello], [])
+ assert {:ok, pid} = :gen_server.start_link(Sample, [:hello], [])
assert :gen_server.call(pid, :pop) == :hello
- assert :gen_server.cast(pid, { :push, :world }) == :ok
+ assert :gen_server.cast(pid, {:push, :world}) == :ok
assert :gen_server.call(pid, :pop) == :world
end
test "call stops server on unknown requests" do
Process.flag(:trap_exit, true)
- assert { :ok, pid } = :gen_server.start_link(Sample, [:hello], [])
+ assert {:ok, pid} = :gen_server.start_link(Sample, [:hello], [])
catch_exit(:gen_server.call(pid, :unknown_request))
- assert_receive { :EXIT, ^pid, {:bad_call, :unknown_request} }
+ assert_receive {:EXIT, ^pid, {:bad_call, :unknown_request}}
after
Process.flag(:trap_exit, false)
end
test "cast stops server on unknown requests" do
Process.flag(:trap_exit, true)
- assert { :ok, pid } = :gen_server.start_link(Sample, [:hello], [])
+ assert {:ok, pid} = :gen_server.start_link(Sample, [:hello], [])
:gen_server.cast(pid, :unknown_request)
- assert_receive { :EXIT, ^pid, {:bad_cast, :unknown_request} }
+ assert_receive {:EXIT, ^pid, {:bad_cast, :unknown_request}}
after
Process.flag(:trap_exit, false)
end
diff --git a/lib/elixir/test/elixir/hash_dict_test.exs b/lib/elixir/test/elixir/hash_dict_test.exs
index 668a60a40..95cd93ef5 100644
--- a/lib/elixir/test/elixir/hash_dict_test.exs
+++ b/lib/elixir/test/elixir/hash_dict_test.exs
@@ -32,19 +32,19 @@ defmodule HashDictTest do
dict = filled_dict(8)
list = dict |> HashDict.to_list
assert length(list) == 8
- assert { 1, 1 } in list
+ assert {1, 1} in list
assert list == Enum.to_list(dict)
dict = filled_dict(20)
list = dict |> HashDict.to_list
assert length(list) == 20
- assert { 1, 1 } in list
+ assert {1, 1} in list
assert list == Enum.to_list(dict)
dict = filled_dict(120)
list = dict |> HashDict.to_list
assert length(list) == 120
- assert { 1, 1 } in list
+ assert {1, 1} in list
assert list == Enum.to_list(dict)
end
@@ -57,22 +57,22 @@ defmodule HashDictTest do
end
defp smoke_test(range) do
- { dict, _ } = Enum.reduce range, { HashDict.new, 1 }, fn(x, { acc, i }) ->
+ {dict, _} = Enum.reduce range, {HashDict.new, 1}, fn(x, {acc, i}) ->
acc = HashDict.put(acc, x, x)
assert HashDict.size(acc) == i
- { acc, i + 1 }
+ {acc, i + 1}
end
Enum.each range, fn(x) ->
assert HashDict.get(dict, x) == x
end
- { dict, _ } = Enum.reduce range, { dict, Enum.count(range) }, fn(x, { acc, i }) ->
+ {dict, _} = Enum.reduce range, {dict, Enum.count(range)}, fn(x, {acc, i}) ->
assert HashDict.size(acc) == i
acc = HashDict.delete(acc, x)
assert HashDict.size(acc) == i - 1
assert HashDict.get(acc, x) == nil
- { acc, i - 1 }
+ {acc, i - 1}
end
assert dict == HashDict.new
diff --git a/lib/elixir/test/elixir/inspect/algebra_test.exs b/lib/elixir/test/elixir/inspect/algebra_test.exs
index 666721601..60fc4b18b 100644
--- a/lib/elixir/test/elixir/inspect/algebra_test.exs
+++ b/lib/elixir/test/elixir/inspect/algebra_test.exs
@@ -30,8 +30,8 @@ defmodule Inspect.AlgebraTest do
test "break doc" do
# Consistence with definitions
- assert break("break") == { :doc_break, "break" }
- assert break("") == { :doc_break, "" }
+ assert break("break") == {:doc_break, "break"}
+ assert break("") == {:doc_break, ""}
# Wrong argument type
assert_raise FunctionClauseError, fn -> break(42) end
@@ -45,9 +45,9 @@ defmodule Inspect.AlgebraTest do
test "glue doc" do
# Consistence with definitions
- assert glue("a", "->", "b") == { :doc_cons,
- "a", { :doc_cons, { :doc_break, "->" }, "b" }
- }
+ assert glue("a", "->", "b") == {:doc_cons,
+ "a", {:doc_cons, {:doc_break, "->"}, "b"}
+ }
assert glue("a", "b") == glue("a", " ", "b")
# Wrong argument type
@@ -64,14 +64,14 @@ defmodule Inspect.AlgebraTest do
test "space doc" do
# Consistency with definitions
- assert space("a", "b") == { :doc_cons,
- "a", { :doc_cons, " ", "b" }
- }
+ assert space("a", "b") == {:doc_cons,
+ "a", {:doc_cons, " ", "b"}
+ }
end
test "nest doc" do
# Consistence with definitions
- assert nest(empty, 1) == { :doc_nest, 1, empty }
+ assert nest(empty, 1) == {:doc_nest, 1, empty}
assert nest(empty, 0) == :doc_nil
# Wrong argument type
@@ -89,7 +89,7 @@ defmodule Inspect.AlgebraTest do
test "line doc" do
# Consistency with definitions
assert line("a", "b") ==
- { :doc_cons, "a", { :doc_cons, :doc_line, "b" } }
+ {:doc_cons, "a", {:doc_cons, :doc_line, "b"}}
# Consistence of corresponding sdoc
assert factor(line("a", "b"), 1) == ["a", "\n", "b"]
@@ -103,8 +103,8 @@ defmodule Inspect.AlgebraTest do
test "group doc" do
# Consistency with definitions
assert group(glue("a", "b")) ==
- { :doc_group, { :doc_cons, "a", concat(break, "b") }}
- assert group(empty) == { :doc_group, empty }
+ {:doc_group, {:doc_cons, "a", concat(break, "b")}}
+ assert group(empty) == {:doc_group, empty}
# Consistence of corresponding sdoc
assert factor(glue("a", "b"), 1) == ["a", " ", "b"]
diff --git a/lib/elixir/test/elixir/inspect_test.exs b/lib/elixir/test/elixir/inspect_test.exs
index c57414f05..94917cf51 100644
--- a/lib/elixir/test/elixir/inspect_test.exs
+++ b/lib/elixir/test/elixir/inspect_test.exs
@@ -122,24 +122,24 @@ defmodule Inspect.TupleTest do
use ExUnit.Case
test :basic do
- assert inspect({ 1, "b", 3 }) == "{1, \"b\", 3}"
- assert inspect({ 1, "b", 3 }, [pretty: true, width: 1]) == "{1,\n \"b\",\n 3}"
+ assert inspect({1, "b", 3}) == "{1, \"b\", 3}"
+ assert inspect({1, "b", 3}, [pretty: true, width: 1]) == "{1,\n \"b\",\n 3}"
end
test :record_like do
- assert inspect({ :foo, :bar }) == "{:foo, :bar}"
+ assert inspect({:foo, :bar}) == "{:foo, :bar}"
end
test :with_builtin_like_record do
- assert inspect({ :list, 1 }) == "{:list, 1}"
+ assert inspect({:list, 1}) == "{:list, 1}"
end
test :with_record_like_tuple do
- assert inspect({ List, 1 }) == "{List, 1}"
+ assert inspect({List, 1}) == "{List, 1}"
end
test :with_record_like_pseudo_exception do
- assert inspect({ Other, :__exception__, 1 }) == "{Other, :__exception__, 1}"
+ assert inspect({Other, :__exception__, 1}) == "{Other, :__exception__, 1}"
end
defrecord Config, a: 1, b: []
@@ -159,14 +159,14 @@ defmodule Inspect.TupleTest do
defrecord Rec, value: 1
test :two_items_record do
- assert inspect({ Rec[value: 1], 1 }) == "{Inspect.TupleTest.Rec[value: 1], 1}"
+ assert inspect({Rec[value: 1], 1}) == "{Inspect.TupleTest.Rec[value: 1], 1}"
end
test :false_positives do
import ExUnit.CaptureIO
assert capture_io(:stderr, fn ->
- assert inspect({ Range, nil }) == "{Range, nil}"
+ assert inspect({Range, nil}) == "{Range, nil}"
end) =~ "** (Inspect.Error) Got FunctionClauseError with message no function clause matching in Inspect.Range.inspect/2"
end
@@ -175,7 +175,7 @@ defmodule Inspect.TupleTest do
end
test :with_limit do
- assert inspect({ 1, 2, 3, 4 }, limit: 3) == "{1, 2, 3, ...}"
+ assert inspect({1, 2, 3, 4}, limit: 3) == "{1, 2, 3, ...}"
end
test :with_records_false do
@@ -250,8 +250,8 @@ defmodule Inspect.MapTest do
use ExUnit.Case
test :basic do
- assert inspect(%{ 1 => "b" }) == "%{1 => \"b\"}"
- assert inspect(%{ 1 => "b", 2 => "c"}, [pretty: true, width: 1]) == "%{1 => \"b\",\n 2 => \"c\"}"
+ assert inspect(%{1 => "b"}) == "%{1 => \"b\"}"
+ assert inspect(%{1 => "b", 2 => "c"}, [pretty: true, width: 1]) == "%{1 => \"b\",\n 2 => \"c\"}"
end
test :keyword do
@@ -261,7 +261,7 @@ defmodule Inspect.MapTest do
end
test :with_limit do
- assert inspect(%{ 1 => 1, 2 => 2, 3 => 3, 4 => 4 }, limit: 3) == "%{1 => 1, 2 => 2, 3 => 3, ...}"
+ assert inspect(%{1 => 1, 2 => 2, 3 => 3, 4 => 4}, limit: 3) == "%{1 => 1, 2 => 2, 3 => 3, ...}"
end
defmodule Public do
@@ -339,8 +339,8 @@ defmodule Inspect.OthersTest do
:code.delete(V)
:code.purge(V)
- { :ok, anony } = :application.get_env(:elixir, :anony)
- { :ok, named } = :application.get_env(:elixir, :named)
+ {:ok, anony} = :application.get_env(:elixir, :anony)
+ {:ok, named} = :application.get_env(:elixir, :named)
assert inspect(anony) =~ ~r"#Function<0.\d+/0 in Inspect.OthersTest.V>"
assert inspect(named) =~ ~r"&Inspect.OthersTest.V.fun/0"
diff --git a/lib/elixir/test/elixir/io/ansi_test.exs b/lib/elixir/test/elixir/io/ansi_test.exs
index 1058b490e..5239d4354 100644
--- a/lib/elixir/test/elixir/io/ansi_test.exs
+++ b/lib/elixir/test/elixir/io/ansi_test.exs
@@ -6,7 +6,7 @@ defmodule IO.ANSITest do
test :escape_single do
assert IO.ANSI.escape("Hello, %{red}world!", true) ==
"Hello, #{IO.ANSI.red}world!#{IO.ANSI.reset}"
- assert IO.ANSI.escape("Hello, %{ red }world!", true) ==
+ assert IO.ANSI.escape("Hello, %{red}world!", true) ==
"Hello, #{IO.ANSI.red}world!#{IO.ANSI.reset}"
end
diff --git a/lib/elixir/test/elixir/io_test.exs b/lib/elixir/test/elixir/io_test.exs
index d13aff860..7d46dbbf5 100644
--- a/lib/elixir/test/elixir/io_test.exs
+++ b/lib/elixir/test/elixir/io_test.exs
@@ -5,25 +5,25 @@ defmodule IOTest do
import ExUnit.CaptureIO
test :read_with_count do
- { :ok, file } = File.open(Path.expand('fixtures/file.txt', __DIR__), [:char_list])
+ {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:char_list])
assert 'FOO' == IO.read(file, 3)
assert File.close(file) == :ok
end
test :read_with_utf8_and_binary do
- { :ok, file } = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
+ {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
assert "Русский" == IO.read(file, 7)
assert File.close(file) == :ok
end
test :binread do
- { :ok, file } = File.open(Path.expand('fixtures/utf8.txt', __DIR__))
+ {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__))
assert "Русский" == IO.binread(file, 14)
assert File.close(file) == :ok
end
test :getn do
- { :ok, file } = File.open(Path.expand('fixtures/file.txt', __DIR__))
+ {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__))
assert "F" == IO.getn(file, "")
assert "O" == IO.getn(file, "")
assert "O" == IO.getn(file, "")
@@ -33,47 +33,47 @@ defmodule IOTest do
end
test :getn_with_count do
- { :ok, file } = File.open(Path.expand('fixtures/file.txt', __DIR__), [:char_list])
+ {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:char_list])
assert 'FOO' == IO.getn(file, "", 3)
assert File.close(file) == :ok
end
test :getn_with_utf8_and_binary do
- { :ok, file } = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
+ {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
assert "Русский" == IO.getn(file, "", 7)
assert File.close(file) == :ok
end
test :gets do
- { :ok, file } = File.open(Path.expand('fixtures/file.txt', __DIR__), [:char_list])
+ {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:char_list])
assert 'FOO\n' == IO.gets(file, "")
assert :eof == IO.gets(file, "")
assert File.close(file) == :ok
end
test :gets_with_utf8_and_binary do
- { :ok, file } = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
+ {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
assert "Русский\n" == IO.gets(file, "")
assert "日\n" == IO.gets(file, "")
assert File.close(file) == :ok
end
test :readline do
- { :ok, file } = File.open(Path.expand('fixtures/file.txt', __DIR__))
+ {:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__))
assert "FOO\n" == IO.read(file, :line)
assert :eof == IO.read(file, :line)
assert File.close(file) == :ok
end
test :readline_with_utf8_and_binary do
- { :ok, file } = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
+ {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
assert "Русский\n" == IO.read(file, :line)
assert "日\n" == IO.read(file, :line)
assert File.close(file) == :ok
end
test :binreadline do
- { :ok, file } = File.open(Path.expand('fixtures/utf8.txt', __DIR__))
+ {:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__))
assert "Русский\n" == IO.binread(file, :line)
assert "日\n" == IO.binread(file, :line)
assert File.close(file) == :ok
diff --git a/lib/elixir/test/elixir/kernel/case_test.exs b/lib/elixir/test/elixir/kernel/case_test.exs
index 08d2b550a..f6b312777 100644
--- a/lib/elixir/test/elixir/kernel/case_test.exs
+++ b/lib/elixir/test/elixir/kernel/case_test.exs
@@ -8,20 +8,20 @@ defmodule Kernel.CaseTest do
end
test :nested_variables do
- assert vars_case(400, 1) == { 400, 1 }
- assert vars_case(401, 1) == { 400, -1 }
- assert vars_case(0, -1) == { 0, -1 }
- assert vars_case(-1, -1) == { 0, 1 }
+ assert vars_case(400, 1) == {400, 1}
+ assert vars_case(401, 1) == {400, -1}
+ assert vars_case(0, -1) == {0, -1}
+ assert vars_case(-1, -1) == {0, 1}
end
test :nested_vars_match do
- x = { :error, { :ok, :done } }
+ x = {:error, {:ok, :done}}
assert (case x do
- { :ok, right } ->
+ {:ok, right} ->
right
- { _left, right } ->
+ {_left, right} ->
case right do
- { :ok, right } -> right
+ {:ok, right} -> right
end
end) == :done
end
diff --git a/lib/elixir/test/elixir/kernel/cli_test.exs b/lib/elixir/test/elixir/kernel/cli_test.exs
index 3c8abf0f3..34359afbd 100644
--- a/lib/elixir/test/elixir/kernel/cli_test.exs
+++ b/lib/elixir/test/elixir/kernel/cli_test.exs
@@ -19,7 +19,7 @@ defmodule Kernel.CLI.OptionParsingTest do
test "properly parses paths" do
root = fixture_path("../../..") |> to_char_list
list = elixir('-pa "#{root}/*" -pz "#{root}/lib/*" -e "IO.inspect(:code.get_path, limit: :infinity)"')
- { path, _ } = Code.eval_string list, []
+ {path, _} = Code.eval_string list, []
# pa
assert to_char_list(Path.expand('ebin', root)) in path
diff --git a/lib/elixir/test/elixir/kernel/comprehension_test.exs b/lib/elixir/test/elixir/kernel/comprehension_test.exs
index 7f8222bc6..642647edf 100644
--- a/lib/elixir/test/elixir/kernel/comprehension_test.exs
+++ b/lib/elixir/test/elixir/kernel/comprehension_test.exs
@@ -51,7 +51,7 @@ defmodule Kernel.ComprehensionTest do
end
test "for comprehensions generators precedence" do
- assert (for { _, _ } = x <- [foo: :bar], do: x) ==
+ assert (for {_, _} = x <- [foo: :bar], do: x) ==
[foo: :bar]
end
@@ -121,7 +121,7 @@ defmodule Kernel.ComprehensionTest do
defp collectable_pdict do
fn
- _, { :cont, x } -> Process.put(:into_cont, [x|Process.get(:into_cont)])
+ _, {:cont, x} -> Process.put(:into_cont, [x|Process.get(:into_cont)])
_, :done -> Process.put(:into_done, true)
_, :halt -> Process.put(:into_halt, true)
end
@@ -247,7 +247,7 @@ defmodule Kernel.ComprehensionTest do
end
test :generator_precedence do
- assert lc { _, _ } = x inlist [foo: :bar], do: x
+ assert lc {_, _} = x inlist [foo: :bar], do: x
end
test :bit_comprehensions do
diff --git a/lib/elixir/test/elixir/kernel/docs_test.exs b/lib/elixir/test/elixir/kernel/docs_test.exs
index 008836b14..a5a6bfa30 100644
--- a/lib/elixir/test/elixir/kernel/docs_test.exs
+++ b/lib/elixir/test/elixir/kernel/docs_test.exs
@@ -29,7 +29,7 @@ defmodule Kernel.DocsTest do
]
assert Docs.__info__(:docs) == expected
- assert Docs.__info__(:moduledoc) == { 7, "moduledoc" }
+ assert Docs.__info__(:moduledoc) == {7, "moduledoc"}
end
diff --git a/lib/elixir/test/elixir/kernel/errors_test.exs b/lib/elixir/test/elixir/kernel/errors_test.exs
index be9f3979c..0384c2015 100644
--- a/lib/elixir/test/elixir/kernel/errors_test.exs
+++ b/lib/elixir/test/elixir/kernel/errors_test.exs
@@ -188,11 +188,11 @@ defmodule Kernel.ErrorsTest do
test :literal_on_map_and_struct do
assert_compile_fail SyntaxError,
"nofile:1: syntax error before: '}'",
- '%{ { :a, :b } }'
+ '%{{:a, :b}}'
assert_compile_fail SyntaxError,
"nofile:1: syntax error before: '{'",
- '%{ :a, :b }{ a: :b }'
+ '%{:a, :b}{a: :b}'
end
test :struct_fields_on_defstruct do
@@ -220,11 +220,11 @@ defmodule Kernel.ErrorsTest do
test :unbound_map_key_var do
assert_compile_fail CompileError,
"nofile:1: illegal use of variable x in map key",
- '%{ x => 1 } = %{}'
+ '%{x => 1} = %{}'
assert_compile_fail CompileError,
"nofile:1: illegal use of variable x in map key",
- '%{ x = 1 => 1 }'
+ '%{x = 1 => 1}'
end
test :struct_errors do
@@ -244,13 +244,13 @@ defmodule Kernel.ErrorsTest do
defmodule GoodStruct do
def __struct__ do
- %{ name: "josé" }
+ %{name: "josé"}
end
end
assert_compile_fail CompileError,
"nofile:1: unknown key :age for struct Kernel.ErrorsTest.GoodStruct",
- '%#{GoodStruct}{ age: 27 }'
+ '%#{GoodStruct}{age: 27}'
end
test :name_for_defmodule do
@@ -431,7 +431,7 @@ defmodule Kernel.ErrorsTest do
'''
defmodule ErrorsTest do
defmacrop oops do
- { :foo, :bar, :baz, :bat }
+ {:foo, :bar, :baz, :bat}
end
def test, do: oops
diff --git a/lib/elixir/test/elixir/kernel/expansion_test.exs b/lib/elixir/test/elixir/kernel/expansion_test.exs
index 8fe23ac73..55c37015b 100644
--- a/lib/elixir/test/elixir/kernel/expansion_test.exs
+++ b/lib/elixir/test/elixir/kernel/expansion_test.exs
@@ -31,7 +31,7 @@ defmodule Kernel.ExpansionTest do
alias true, as: True
input = quote do: (alias :hello, as: World, warn: True)
- { output, env } = expand_env(input, __ENV__)
+ {output, env} = expand_env(input, __ENV__)
assert output == quote do: (alias :hello, as: :"Elixir.World", warn: true)
assert env.aliases == [{:"Elixir.True", true}, {:"Elixir.World", :hello}]
@@ -67,9 +67,9 @@ defmodule Kernel.ExpansionTest do
end
test "=: defines vars" do
- { output, env } = expand_env(quote(do: a = 1), __ENV__)
+ {output, env} = expand_env(quote(do: a = 1), __ENV__)
assert output == quote(do: a = 1)
- assert { :a, __MODULE__ } in env.vars
+ assert {:a, __MODULE__} in env.vars
end
test "=: does not carry rhs imports" do
@@ -78,7 +78,7 @@ defmodule Kernel.ExpansionTest do
end
test "=: does not define _" do
- { output, env } = expand_env(quote(do: _ = 1), __ENV__)
+ {output, env} = expand_env(quote(do: _ = 1), __ENV__)
assert output == quote(do: _ = 1)
assert env.vars == []
end
@@ -100,14 +100,14 @@ defmodule Kernel.ExpansionTest do
test "__ENV__" do
env = __ENV__
assert expand_env(quote(do: __ENV__), env) ==
- { { :{}, [], tuple_to_list(env.line(0)) }, env }
+ {{:{}, [], tuple_to_list(env.line(0))}, env}
end
test "__ENV__.accessor" do
env = __ENV__
- assert expand_env(quote(do: __ENV__.file), env) == { __ENV__.file, env }
+ assert expand_env(quote(do: __ENV__.file), env) == {__ENV__.file, env}
assert expand_env(quote(do: __ENV__.unknown), env) ==
- { quote(do: unquote({ :{}, [], tuple_to_list(env.line(0)) }).unknown), env }
+ {quote(do: unquote({:{}, [], tuple_to_list(env.line(0))}).unknown), env}
end
## Super
@@ -119,7 +119,7 @@ defmodule Kernel.ExpansionTest do
## Vars
test "vars: expand to local call" do
- { output, env } = expand_env(quote(do: a), __ENV__)
+ {output, env} = expand_env(quote(do: a), __ENV__)
assert output == quote(do: a())
assert env.vars == []
end
@@ -153,7 +153,7 @@ defmodule Kernel.ExpansionTest do
## Locals
test "locals: expands to remote calls" do
- assert { {:., _, [Kernel, :=~] }, _, [{:a, _, []}, {:b, _, []}] } =
+ assert {{:., _, [Kernel, :=~]}, _, [{:a, _, []}, {:b, _, []}]} =
expand(quote do: a =~ b)
end
@@ -175,34 +175,34 @@ defmodule Kernel.ExpansionTest do
## Tuples
test "tuples: expanded as arguments" do
- assert expand(quote(do: { a = 1, a })) == quote do: { a = 1, a() }
- assert expand(quote(do: { b, a = 1, a })) == quote do: { b(), a = 1, a() }
+ assert expand(quote(do: {a = 1, a})) == quote do: {a = 1, a()}
+ assert expand(quote(do: {b, a = 1, a})) == quote do: {b(), a = 1, a()}
end
## Maps & structs
test "maps: expanded as arguments" do
- assert expand(quote(do: %{ a: a = 1, b: a })) == quote do: %{ a: a = 1, b: a() }
+ assert expand(quote(do: %{a: a = 1, b: a})) == quote do: %{a: a = 1, b: a()}
end
test "structs: expanded as arguments" do
- assert expand(quote(do: %:elixir{ a: a = 1, b: a })) ==
- quote do: %:elixir{ a: a = 1, b: a() }
+ assert expand(quote(do: %:elixir{a: a = 1, b: a})) ==
+ quote do: %:elixir{a: a = 1, b: a()}
- assert expand(quote(do: %:"Elixir.Kernel"{ a: a = 1, b: a })) ==
- quote do: %:"Elixir.Kernel"{ a: a = 1, b: a() }
+ assert expand(quote(do: %:"Elixir.Kernel"{a: a = 1, b: a})) ==
+ quote do: %:"Elixir.Kernel"{a: a = 1, b: a()}
end
test "structs: expects atoms" do
assert_raise CompileError, ~r"expected struct name to be a compile time atom or alias", fn ->
- expand(quote do: %unknown{ a: 1 })
+ expand(quote do: %unknown{a: 1})
end
end
## quote
test "quote: expanded to raw forms" do
- assert expand(quote do: (quote do: hello)) == { :{}, [], [:hello, [], __MODULE__] }
+ assert expand(quote do: (quote do: hello)) == {:{}, [], [:hello, [], __MODULE__]}
end
## Anonymous calls
@@ -417,11 +417,11 @@ defmodule Kernel.ExpansionTest do
test "handles invalid expressions" do
assert_raise CompileError, ~r"invalid quoted expression: {1, 2, 3}", fn ->
- expand(quote do: unquote({ 1, 2, 3 }))
+ expand(quote do: unquote({1, 2, 3}))
end
assert_raise CompileError, ~r"invalid quoted expression: #Function<", fn ->
- expand(quote do: unquote({ :sample, fn -> end }))
+ expand(quote do: unquote({:sample, fn -> end}))
end
end
@@ -440,7 +440,7 @@ defmodule Kernel.ExpansionTest do
end
defp expand_env(expr, env) do
- { expr, env } = :elixir_exp.expand(expr, :elixir_env.ex_to_env(env))
- { expr, set_elem(env, 0, Macro.Env) }
+ {expr, env} = :elixir_exp.expand(expr, :elixir_env.ex_to_env(env))
+ {expr, set_elem(env, 0, Macro.Env)}
end
end
diff --git a/lib/elixir/test/elixir/kernel/fn_test.exs b/lib/elixir/test/elixir/kernel/fn_test.exs
index 82afc6dbd..f85f59a63 100644
--- a/lib/elixir/test/elixir/kernel/fn_test.exs
+++ b/lib/elixir/test/elixir/kernel/fn_test.exs
@@ -71,11 +71,11 @@ defmodule Kernel.FnTest do
end
test "capture and partially apply tuples" do
- assert (&{ &1, &2 }).(1, 2) == { 1, 2 }
- assert (&{ &1, &2, &3 }).(1, 2, 3) == { 1, 2, 3 }
+ assert (&{&1, &2}).(1, 2) == {1, 2}
+ assert (&{&1, &2, &3}).(1, 2, 3) == {1, 2, 3}
- assert (&{ 1, &1 }).(2) == { 1, 2 }
- assert (&{ 1, &1, &2 }).(2, 3) == { 1, 2, 3 }
+ assert (&{1, &1}).(2) == {1, 2}
+ assert (&{1, &1, &2}).(2, 3) == {1, 2, 3}
end
test "capture and partially apply lists" do
diff --git a/lib/elixir/test/elixir/kernel/lexical_tracker_test.exs b/lib/elixir/test/elixir/kernel/lexical_tracker_test.exs
index 2d0abd507..ca914df85 100644
--- a/lib/elixir/test/elixir/kernel/lexical_tracker_test.exs
+++ b/lib/elixir/test/elixir/kernel/lexical_tracker_test.exs
@@ -6,7 +6,7 @@ defmodule Kernel.LexicalTrackerTest do
alias Kernel.LexicalTracker, as: D
setup do
- { :ok, [pid: D.start_link] }
+ {:ok, [pid: D.start_link]}
end
teardown config do
diff --git a/lib/elixir/test/elixir/kernel/overridable_test.exs b/lib/elixir/test/elixir/kernel/overridable_test.exs
index 67996dc45..75eaa3dbd 100644
--- a/lib/elixir/test/elixir/kernel/overridable_test.exs
+++ b/lib/elixir/test/elixir/kernel/overridable_test.exs
@@ -18,33 +18,33 @@ defmodule Kernel.Overridable do
end
def explicit_nested_super do
- { super?, 2 }
+ {super?, 2}
end
- false = Module.overridable? __MODULE__, { :explicit_nested_super, 0 }
+ false = Module.overridable? __MODULE__, {:explicit_nested_super, 0}
defoverridable [sample: 0, with_super: 0, without_super: 0, explicit_nested_super: 0]
- true = Module.overridable? __MODULE__, { :explicit_nested_super, 0 }
+ true = Module.overridable? __MODULE__, {:explicit_nested_super, 0}
def explicit_nested_super do
- { super, super?, 1 }
+ {super, super?, 1}
end
- true = Module.overridable? __MODULE__, { :explicit_nested_super, 0 }
+ true = Module.overridable? __MODULE__, {:explicit_nested_super, 0}
defoverridable [explicit_nested_super: 0]
- true = Module.overridable? __MODULE__, { :explicit_nested_super, 0 }
+ true = Module.overridable? __MODULE__, {:explicit_nested_super, 0}
def implicit_nested_super do
- { super?, 1 }
+ {super?, 1}
end
defoverridable [implicit_nested_super: 0]
def implicit_nested_super do
- { super, super?, 0 }
+ {super, super?, 0}
end
def super_with_explicit_args(x, y) do
@@ -71,11 +71,11 @@ defmodule Kernel.Overridable do
end
def no_overridable do
- { :no_overridable, super? }
+ {:no_overridable, super?}
end
def explicit_nested_super do
- { super, super?, 0 }
+ {super, super?, 0}
end
def super_with_explicit_args(x, y) do
@@ -112,11 +112,11 @@ defmodule Kernel.OverridableTest do
end
test "overridable overridden with nested super" do
- assert Overridable.explicit_nested_super == { { { false, 2 }, true, 1 }, true, 0 }
+ assert Overridable.explicit_nested_super == {{{false, 2}, true, 1}, true, 0}
end
test "overridable node overridden with nested super" do
- assert Overridable.implicit_nested_super == { { false, 1 }, true, 0 }
+ assert Overridable.implicit_nested_super == {{false, 1}, true, 0}
end
test "calling super with explicit args" do
@@ -124,7 +124,7 @@ defmodule Kernel.OverridableTest do
end
test "function without overridable returns false for super?" do
- assert Overridable.no_overridable == { :no_overridable, false }
+ assert Overridable.no_overridable == {:no_overridable, false}
end
test "overridable with many clauses" do
diff --git a/lib/elixir/test/elixir/kernel/quote_test.exs b/lib/elixir/test/elixir/kernel/quote_test.exs
index 140f0cf36..047f3d06b 100644
--- a/lib/elixir/test/elixir/kernel/quote_test.exs
+++ b/lib/elixir/test/elixir/kernel/quote_test.exs
@@ -8,22 +8,22 @@ defmodule Kernel.QuoteTest do
end
test :tuple do
- assert quote(do: { :a, 1 }) == {:a, 1}
+ assert quote(do: {:a, 1}) == {:a, 1}
end
test :keep_line do
## DO NOT MOVE THIS LINE
- assert quote(location: :keep, do: bar(1, 2, 3)) == { :bar, [keep: 16], [1, 2, 3] }
+ assert quote(location: :keep, do: bar(1, 2, 3)) == {:bar, [keep: 16], [1, 2, 3]}
end
test :fixed_line do
- assert quote(line: 3, do: bar(1, 2, 3)) == { :bar, [line: 3], [1, 2, 3] }
+ assert quote(line: 3, do: bar(1, 2, 3)) == {:bar, [line: 3], [1, 2, 3]}
end
test :quote_line_var do
## DO NOT MOVE THIS LINE
line = __ENV__.line
- assert quote(line: line, do: bar(1, 2, 3)) == { :bar, [line: 25], [1, 2, 3] }
+ assert quote(line: line, do: bar(1, 2, 3)) == {:bar, [line: 25], [1, 2, 3]}
end
test :unquote_call do
@@ -33,11 +33,11 @@ defmodule Kernel.QuoteTest do
assert quote(do: foo.unquote(:bar)) == quote(do: foo.bar)
assert quote(do: foo.unquote(:bar)(1)) == quote(do: foo.bar(1))
assert quote(do: foo.unquote(:bar)(1) do 2 + 3 end) == quote(do: foo.bar(1) do 2 + 3 end)
- assert quote(do: foo.unquote({ :bar, [], nil })) == quote(do: foo.bar)
- assert quote(do: foo.unquote({ :bar, [], [1,2] })) == quote(do: foo.bar(1,2))
+ assert quote(do: foo.unquote({:bar, [], nil})) == quote(do: foo.bar)
+ assert quote(do: foo.unquote({:bar, [], [1,2]})) == quote(do: foo.bar(1,2))
- assert Code.eval_quoted(quote(do: Foo.unquote(Bar))) == { Elixir.Foo.Bar, [] }
- assert Code.eval_quoted(quote(do: Foo.unquote(quote do: Bar))) == { Elixir.Foo.Bar, [] }
+ assert Code.eval_quoted(quote(do: Foo.unquote(Bar))) == {Elixir.Foo.Bar, []}
+ assert Code.eval_quoted(quote(do: Foo.unquote(quote do: Bar))) == {Elixir.Foo.Bar, []}
assert_raise ArgumentError, fn ->
quote(do: foo.unquote(1))
@@ -45,7 +45,7 @@ defmodule Kernel.QuoteTest do
end
test :nested_quote do
- assert { :quote, _, [[do: { :unquote, _, _ }]] } = quote(do: quote(do: unquote(x)))
+ assert {:quote, _, [[do: {:unquote, _, _}]]} = quote(do: quote(do: unquote(x)))
end
defmacrop nested_quote_in_macro do
@@ -62,7 +62,7 @@ defmodule Kernel.QuoteTest do
assert nested_quote_in_macro == 1
end
- Enum.each [foo: 1, bar: 2, baz: 3], fn { k, v } ->
+ Enum.each [foo: 1, bar: 2, baz: 3], fn {k, v} ->
def unquote(k)(arg) do
unquote(v) + arg
end
@@ -92,11 +92,11 @@ defmodule Kernel.QuoteTest do
end
test :splice_on_stab do
- { fun, [] } =
+ {fun, []} =
Code.eval_quoted(quote(do: fn(unquote_splicing([1, 2, 3])) -> :ok end), [])
assert fun.(1, 2, 3) == :ok
- { fun, [] } =
+ {fun, []} =
Code.eval_quoted(quote(do: fn(1, unquote_splicing([2, 3])) -> :ok end), [])
assert fun.(1, 2, 3) == :ok
end
@@ -112,13 +112,13 @@ defmodule Kernel.QuoteTest do
end
test :splice_on_map do
- assert %{ unquote_splicing([foo: :bar]) } == %{ foo: :bar }
- assert %{ unquote_splicing([foo: :bar]), baz: :bat } == %{ foo: :bar, baz: :bat }
- assert %{ unquote_splicing([foo: :bar]), :baz => :bat } == %{ foo: :bar, baz: :bat }
- assert %{ :baz => :bat, unquote_splicing([foo: :bar]) } == %{ foo: :bar, baz: :bat }
+ assert %{unquote_splicing([foo: :bar])} == %{foo: :bar}
+ assert %{unquote_splicing([foo: :bar]), baz: :bat} == %{foo: :bar, baz: :bat}
+ assert %{unquote_splicing([foo: :bar]), :baz => :bat} == %{foo: :bar, baz: :bat}
+ assert %{:baz => :bat, unquote_splicing([foo: :bar])} == %{foo: :bar, baz: :bat}
- map = %{ foo: :default }
- assert %{ map | unquote_splicing([foo: :bar]) } == %{ foo: :bar }
+ map = %{foo: :default}
+ assert %{map | unquote_splicing([foo: :bar])} == %{foo: :bar}
end
test :when do
@@ -130,21 +130,21 @@ defmodule Kernel.QuoteTest do
end
test :stab do
- assert [{ :->, _, [[], nil] }] = (quote do -> end)
- assert [{ :->, _, [[], nil] }] = (quote do: (->))
+ assert [{:->, _, [[], nil]}] = (quote do -> end)
+ assert [{:->, _, [[], nil]}] = (quote do: (->))
- assert [{ :->, _, [[1], nil] }] = (quote do 1 -> end)
- assert [{ :->, _, [[1], nil] }] = (quote do: (1 ->))
+ assert [{:->, _, [[1], nil]}] = (quote do 1 -> end)
+ assert [{:->, _, [[1], nil]}] = (quote do: (1 ->))
- assert [{ :->, _, [[], 1] }] = (quote do -> 1 end)
- assert [{ :->, _, [[], 1] }] = (quote do: (-> 1))
+ assert [{:->, _, [[], 1]}] = (quote do -> 1 end)
+ assert [{:->, _, [[], 1]}] = (quote do: (-> 1))
end
test :bind_quoted do
- assert quote(bind_quoted: [foo: 1 + 2], do: foo) == { :__block__, [], [
- { :=, [], [{ :foo, [], Kernel.QuoteTest }, 3] },
- { :foo, [], Kernel.QuoteTest }
- ] }
+ assert quote(bind_quoted: [foo: 1 + 2], do: foo) == {:__block__, [], [
+ {:=, [], [{:foo, [], Kernel.QuoteTest}, 3]},
+ {:foo, [], Kernel.QuoteTest}
+ ]}
end
test :literals do
@@ -159,7 +159,7 @@ defmodule Kernel.QuoteTest do
end
test :with_dynamic_opts do
- assert quote(dynamic_opts, do: bar(1, 2, 3)) == { :bar, [line: 3], [1, 2, 3] }
+ assert quote(dynamic_opts, do: bar(1, 2, 3)) == {:bar, [line: 3], [1, 2, 3]}
end
test :unary_with_integer_precedence do
@@ -196,7 +196,7 @@ defmodule Kernel.QuoteTest.ErrorsTest do
mod = Kernel.QuoteTest.ErrorsTest
file = __ENV__.file |> Path.relative_to_cwd |> List.from_char_data!
- assert [{ ^mod, :add, 2, [file: ^file, line: 176] }|_] = System.stacktrace
+ assert [{^mod, :add, 2, [file: ^file, line: 176]}|_] = System.stacktrace
end
test :outside_function_error do
@@ -206,7 +206,7 @@ defmodule Kernel.QuoteTest.ErrorsTest do
mod = Kernel.QuoteTest.ErrorsTest
file = __ENV__.file |> Path.relative_to_cwd |> List.from_char_data!
- assert [{ ^mod, _, _, [file: ^file, line: 204] }|_] = System.stacktrace
+ assert [{^mod, _, _, [file: ^file, line: 204]}|_] = System.stacktrace
end
end
@@ -320,17 +320,17 @@ defmodule Kernel.QuoteTest.AliasHygieneTest do
alias Dict, as: SuperDict
test :annotate_aliases do
- assert { :__aliases__, [alias: false], [:Foo, :Bar] } =
+ assert {:__aliases__, [alias: false], [:Foo, :Bar]} =
quote(do: Foo.Bar)
- assert { :__aliases__, [alias: false], [:Dict, :Bar] } =
+ assert {:__aliases__, [alias: false], [:Dict, :Bar]} =
quote(do: Dict.Bar)
- assert { :__aliases__, [alias: Dict.Bar], [:SuperDict, :Bar] } =
+ assert {:__aliases__, [alias: Dict.Bar], [:SuperDict, :Bar]} =
quote(do: SuperDict.Bar)
end
test :expand_aliases do
- assert Code.eval_quoted(quote do: SuperDict.Bar) == { Elixir.Dict.Bar, [] }
- assert Code.eval_quoted(quote do: alias!(SuperDict.Bar)) == { Elixir.SuperDict.Bar, [] }
+ assert Code.eval_quoted(quote do: SuperDict.Bar) == {Elixir.Dict.Bar, []}
+ assert Code.eval_quoted(quote do: alias!(SuperDict.Bar)) == {Elixir.SuperDict.Bar, []}
end
test :expand_aliases_without_macro do
diff --git a/lib/elixir/test/elixir/kernel/rescue_test.exs b/lib/elixir/test/elixir/kernel/rescue_test.exs
index 9591f7ccd..24bc69c52 100644
--- a/lib/elixir/test/elixir/kernel/rescue_test.exs
+++ b/lib/elixir/test/elixir/kernel/rescue_test.exs
@@ -149,7 +149,7 @@ defmodule Kernel.RescueTest do
test :tuple_badarg_error do
result = try do
- :erlang.error({ :badarg, [1, 2, 3] })
+ :erlang.error({:badarg, [1, 2, 3]})
rescue
x in [ArgumentError] -> x.message
end
diff --git a/lib/elixir/test/elixir/kernel/typespec_test.exs b/lib/elixir/test/elixir/kernel/typespec_test.exs
index 879cb2695..7927f47d4 100644
--- a/lib/elixir/test/elixir/kernel/typespec_test.exs
+++ b/lib/elixir/test/elixir/kernel/typespec_test.exs
@@ -10,7 +10,7 @@ defmodule Kernel.TypespecTest do
# module
defmacrop test_module([{:do, block}]) do
quote do
- { :module, _, _binary, result } = defmodule T do
+ {:module, _, _binary, result} = defmodule T do
unquote(block)
end
:code.delete(T)
@@ -323,7 +323,7 @@ defmodule Kernel.TypespecTest do
end
test "@spec(spec) with guards" do
- { spec1, spec2, spec3 } = test_module do
+ {spec1, spec2, spec3} = test_module do
def myfun1(x), do: x
spec1 = @spec myfun1(x) :: boolean when [x: integer]
@@ -333,7 +333,7 @@ defmodule Kernel.TypespecTest do
def myfun3(_x, y), do: y
spec3 = @spec myfun3(x, y) :: y when [y: x, x: var]
- { spec1, spec2, spec3 }
+ {spec1, spec2, spec3}
end
assert {{:myfun1, 1}, {:type, _, :bounded_fun, [{:type, _, :fun, [{:type, _, :product, [{:var, _, :x}]}, {:type, _, :boolean, []}]}, [{:type, _, :constraint, [{:atom, _, :is_subtype}, [{:var, _, :x}, {:type, _, :integer, []}]]}]]}} = spec1
assert {{:myfun2, 1}, {:type, _, :fun, [{:type, _, :product, [{:var, _, :x}]}, {:var, _, :x}]}} = spec2
@@ -353,23 +353,23 @@ defmodule Kernel.TypespecTest do
end
test "@spec + @callback" do
- { specs, callbacks } = test_module do
+ {specs, callbacks} = test_module do
def myfun(x), do: x
@spec myfun(integer) :: integer
@spec myfun(char_list) :: char_list
@callback cb(integer) :: integer
- { @spec, @callback }
+ {@spec, @callback}
end
assert [
- { {:cb, 1}, {:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]} }
+ {{:cb, 1}, {:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]}}
] = Enum.sort(callbacks)
assert [
- { {:myfun, 1}, {:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]} },
- { {:myfun, 1}, {:type, _, :fun, [{:type, _, :product, [
+ {{:myfun, 1}, {:type, _, :fun, [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}]}},
+ {{:myfun, 1}, {:type, _, :fun, [{:type, _, :product, [
{:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :char_list}, []]}]},
- {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :char_list}, []]}]} }
+ {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :char_list}, []]}]}}
] = Enum.sort(specs)
end
@@ -418,43 +418,43 @@ defmodule Kernel.TypespecTest do
types = Enum.reverse(types)
- for { type, definition } <- Enum.zip(types, quoted) do
+ for {type, definition} <- Enum.zip(types, quoted) do
ast = Kernel.Typespec.type_to_ast(type)
assert Macro.to_string(quote do: @type unquote(ast)) == Macro.to_string(definition)
end
end
test "type_to_ast for records" do
- record_type = { { :record, :my_record },
+ record_type = {{:record, :my_record},
[
- { :typed_record_field,
- { :record_field, 0, { :atom, 0, :field1 }},
- { :type, 0, :atom, [] } },
- { :typed_record_field,
- { :record_field, 0, { :atom, 0, :field2 }},
- { :type, 0, :integer, [] } },
+ {:typed_record_field,
+ {:record_field, 0, {:atom, 0, :field1}},
+ {:type, 0, :atom, []}},
+ {:typed_record_field,
+ {:record_field, 0, {:atom, 0, :field2}},
+ {:type, 0, :integer, []}},
],
[]}
assert Kernel.Typespec.type_to_ast(record_type) ==
- { :::, [], [
- { :my_record, [], [] },
- { :{}, [], [:my_record,
- { :::, [line: 0], [
+ {:::, [], [
+ {:my_record, [], []},
+ {:{}, [], [:my_record,
+ {:::, [line: 0], [
{:field1, 0, nil},
{:atom, [line: 0], []}
- ] },
- { :::, [line: 0], [
+ ]},
+ {:::, [line: 0], [
{:field2, 0, nil},
{:integer, [line: 0], []}
- ] }
- ] }
- ] }
+ ]}
+ ]}
+ ]}
end
test "type_to_ast for paren_type" do
type = {:my_type, {:paren_type, 0, [{:type, 0, :integer, []}]}, []}
assert Kernel.Typespec.type_to_ast(type) ==
- { :::, [], [{:my_type, [], []}, {:integer, [line: 0], []}] }
+ {:::, [], [{:my_type, [], []}, {:integer, [line: 0], []}]}
end
test "spec_to_ast" do
@@ -473,7 +473,7 @@ defmodule Kernel.TypespecTest do
Enum.reverse @spec
end
- for { { { _, _ }, spec }, definition } <- Enum.zip(compiled, specs) do
+ for {{{_, _}, spec}, definition} <- Enum.zip(compiled, specs) do
quoted = quote do: @spec unquote(Kernel.Typespec.spec_to_ast(:a, spec))
assert Macro.to_string(quoted) == Macro.to_string(definition)
end
@@ -482,7 +482,7 @@ defmodule Kernel.TypespecTest do
# types/specs retrieval
test "specs retrieval" do
- { :module, _, binary, _ } = defmodule T do
+ {:module, _, binary, _} = defmodule T do
@spec a :: any
def a, do: nil
end
@@ -495,7 +495,7 @@ defmodule Kernel.TypespecTest do
end
test "types retrieval" do
- { :module, _, binary, _ } = defmodule T do
+ {:module, _, binary, _} = defmodule T do
@type a :: any
@typep b :: any
@spec t(b) :: b
@@ -514,7 +514,7 @@ defmodule Kernel.TypespecTest do
end
test "typedoc retrieval" do
- { :module, _, binary, _ } = defmodule T do
+ {:module, _, binary, _} = defmodule T do
@typedoc "A"
@type a :: any
@typep b :: any
diff --git a/lib/elixir/test/elixir/kernel_test.exs b/lib/elixir/test/elixir/kernel_test.exs
index 9a747b090..4f90b8088 100644
--- a/lib/elixir/test/elixir/kernel_test.exs
+++ b/lib/elixir/test/elixir/kernel_test.exs
@@ -24,7 +24,7 @@ defmodule KernelTest do
x = List.first([1])
assert_raise MatchError, fn ->
- { x, ^x } = { 2, 2 }
+ {x, ^x} = {2, 2}
x
end
end
@@ -94,7 +94,7 @@ defmodule KernelTest do
assert ((); ();) == nil
assert [ 1, (), 3 ] == [1, nil, 3 ]
assert [do: ()] == [do: nil]
- assert { 1, (), 3 } == { 1, nil, 3 }
+ assert {1, (), 3} == {1, nil, 3}
assert (Kernel.&& nil, ()) == nil
assert (Kernel.&& nil, ()) == nil
assert (() && ()) == nil
@@ -106,11 +106,11 @@ defmodule KernelTest do
end
test :__info__ do
- assert { :in, 2 } in Kernel.__info__(:macros)
+ assert {:in, 2} in Kernel.__info__(:macros)
end
test :__info__not_included do
- assert not ({ :__info__, 1 } in Kernel.__info__(:functions))
+ assert not ({:__info__, 1} in Kernel.__info__(:functions))
end
test :macro_exported? do
@@ -131,7 +131,7 @@ defmodule KernelTest do
end
test :function_from___ENV__ do
- assert __ENV__.function == { :test_function_from___ENV__, 1 }
+ assert __ENV__.function == {:test_function_from___ENV__, 1}
end
test :binding do
@@ -165,11 +165,11 @@ defmodule KernelTest do
assert struct(User) == %User{name: "jose"}
user = struct(User, name: "eric")
- assert user == %User{ name: "eric" }
+ assert user == %User{name: "eric"}
assert struct(user, unknown: "key") == user
- assert struct(user, name: "jose") == %User{ name: "jose" }
- assert struct(user, name: "other", __struct__: Post) == %User{ name: "other" }
+ assert struct(user, name: "jose") == %User{name: "jose"}
+ assert struct(user, name: "other", __struct__: Post) == %User{name: "other"}
end
defmodule Conversions do
diff --git a/lib/elixir/test/elixir/keyword_test.exs b/lib/elixir/test/elixir/keyword_test.exs
index ef6aa9587..fc6f497d8 100644
--- a/lib/elixir/test/elixir/keyword_test.exs
+++ b/lib/elixir/test/elixir/keyword_test.exs
@@ -4,14 +4,14 @@ defmodule KeywordTest do
use ExUnit.Case, async: true
test "has a literal syntax" do
- assert [B: 1] == [{ :B, 1 }]
+ assert [B: 1] == [{:B, 1}]
assert [foo?: :bar] == [{:foo?, :bar}]
assert [||: 2, +: 1] == [{:||, 2}, {:+, 1}]
- assert [1, 2, three: :four] == [1, 2, { :three, :four }]
+ assert [1, 2, three: :four] == [1, 2, {:three, :four}]
end
test "is a :: operator on ambiguity" do
- assert [{ :::, _, [{ :a, _, _ }, { :b, _, _ }] }] = quote(do: [a::b])
+ assert [{:::, _, [{:a, _, _}, {:b, _, _}]}] = quote(do: [a::b])
end
test "supports optional comma" do
@@ -38,7 +38,7 @@ defmodule KeywordTest do
end
test "new/2" do
- assert Keyword.new([:a, :b], fn x -> { x, x } end) ==
+ assert Keyword.new([:a, :b], fn x -> {x, x} end) ==
[b: :b, a: :a]
end
diff --git a/lib/elixir/test/elixir/list_test.exs b/lib/elixir/test/elixir/list_test.exs
index 339ffc975..927f433d7 100644
--- a/lib/elixir/test/elixir/list_test.exs
+++ b/lib/elixir/test/elixir/list_test.exs
@@ -83,14 +83,14 @@ defmodule ListTest do
end
test :keyfind do
- assert List.keyfind([a: 1, b: 2], :a, 0) == { :a, 1 }
- assert List.keyfind([a: 1, b: 2], 2, 1) == { :b, 2 }
+ assert List.keyfind([a: 1, b: 2], :a, 0) == {:a, 1}
+ assert List.keyfind([a: 1, b: 2], 2, 1) == {:b, 2}
assert List.keyfind([a: 1, b: 2], :c, 0) == nil
end
test :keyreplace do
- assert List.keyreplace([a: 1, b: 2], :a, 0, { :a, 3 }) == [a: 3, b: 2]
- assert List.keyreplace([a: 1], :b, 0, { :b, 2 }) == [a: 1]
+ assert List.keyreplace([a: 1, b: 2], :a, 0, {:a, 3}) == [a: 3, b: 2]
+ assert List.keyreplace([a: 1], :b, 0, {:b, 2}) == [a: 1]
end
test :keysort do
@@ -99,8 +99,8 @@ defmodule ListTest do
end
test :keystore do
- assert List.keystore([a: 1, b: 2], :a, 0, { :a, 3 }) == [a: 3, b: 2]
- assert List.keystore([a: 1], :b, 0, { :b, 2 }) == [a: 1, b: 2]
+ assert List.keystore([a: 1, b: 2], :a, 0, {:a, 3}) == [a: 3, b: 2]
+ assert List.keystore([a: 1], :b, 0, {:b, 2}) == [a: 1, b: 2]
end
test :keymember? do
@@ -110,9 +110,9 @@ defmodule ListTest do
end
test :keydelete do
- assert List.keydelete([a: 1, b: 2], :a, 0) == [{ :b, 2 }]
- assert List.keydelete([a: 1, b: 2], 2, 1) == [{ :a, 1 }]
- assert List.keydelete([a: 1, b: 2], :c, 0) == [{ :a, 1 }, { :b, 2 }]
+ assert List.keydelete([a: 1, b: 2], :a, 0) == [{:b, 2}]
+ assert List.keydelete([a: 1, b: 2], 2, 1) == [{:a, 1}]
+ assert List.keydelete([a: 1, b: 2], :c, 0) == [{:a, 1}, {:b, 2}]
end
test :insert_at do
diff --git a/lib/elixir/test/elixir/macro_test.exs b/lib/elixir/test/elixir/macro_test.exs
index 3dcd3fb55..74ca735ae 100644
--- a/lib/elixir/test/elixir/macro_test.exs
+++ b/lib/elixir/test/elixir/macro_test.exs
@@ -25,13 +25,13 @@ defmodule MacroTest do
## Escape
test :escape_handle_tuples_with_size_different_than_two do
- assert { :{}, [], [:a] } == Macro.escape({ :a })
- assert { :{}, [], [:a, :b, :c] } == Macro.escape({ :a, :b, :c })
- assert { :{}, [], [:a, { :{}, [], [1,2,3] }, :c] } == Macro.escape({ :a, { 1, 2, 3 }, :c })
+ assert {:{}, [], [:a]} == Macro.escape({:a})
+ assert {:{}, [], [:a, :b, :c]} == Macro.escape({:a, :b, :c})
+ assert {:{}, [], [:a, {:{}, [], [1,2,3]}, :c]} == Macro.escape({:a, {1, 2, 3}, :c})
end
test :escape_simply_returns_tuples_with_size_equal_to_two do
- assert { :a, :b } == Macro.escape({ :a, :b })
+ assert {:a, :b} == Macro.escape({:a, :b})
end
test :escape_simply_returns_any_other_structure do
@@ -39,11 +39,11 @@ defmodule MacroTest do
end
test :escape_handles_maps do
- assert { :%{}, [], [a: 1] } = Macro.escape(%{ a: 1 })
+ assert {:%{}, [], [a: 1]} = Macro.escape(%{a: 1})
end
test :escape_works_recursively do
- assert [1,{:{}, [], [:a,:b,:c]}, 3] == Macro.escape([1, { :a, :b, :c }, 3])
+ assert [1,{:{}, [], [:a,:b,:c]}, 3] == Macro.escape([1, {:a, :b, :c}, 3])
end
test :escape_improper do
@@ -56,11 +56,11 @@ defmodule MacroTest do
assert Macro.escape(contents, unquote: true) == 1
contents = quote unquote: false, do: unquote(x)
- assert Macro.escape(contents, unquote: true) == { :x, [], MacroTest }
+ assert Macro.escape(contents, unquote: true) == {:x, [], MacroTest}
end
defp eval_escaped(contents) do
- { eval, [] } = Code.eval_quoted(Macro.escape(contents, unquote: true))
+ {eval, []} = Code.eval_quoted(Macro.escape(contents, unquote: true))
eval
end
@@ -107,7 +107,7 @@ defmodule MacroTest do
## Expansion
test :expand_once do
- assert { :||, _, _ } = Macro.expand_once(quote(do: oror(1, false)), __ENV__)
+ assert {:||, _, _} = Macro.expand_once(quote(do: oror(1, false)), __ENV__)
end
test :expand_once_with_raw_atom do
@@ -156,7 +156,7 @@ defmodule MacroTest do
test :expand_once_env do
env = __ENV__
- assert Macro.expand_once(quote(do: __ENV__), env) == { :{}, [], tuple_to_list(env) }
+ assert Macro.expand_once(quote(do: __ENV__), env) == {:{}, [], tuple_to_list(env)}
assert Macro.expand_once(quote(do: __ENV__.file), env) == env.file
assert Macro.expand_once(quote(do: __ENV__.unkown), env) == quote(do: __ENV__.unkown)
end
@@ -181,7 +181,7 @@ defmodule MacroTest do
end
test :expand_once_with_imported_macro do
- temp_var = { :x, [], Kernel }
+ temp_var = {:x, [], Kernel}
assert expand_once_and_clean(quote(do: 1 || false), __ENV__) == (quote context: Kernel do
case 1 do
unquote(temp_var) when unquote(temp_var) in [false, nil] -> false
@@ -191,7 +191,7 @@ defmodule MacroTest do
end
test :expand_once_with_require_macro do
- temp_var = { :x, [], Kernel }
+ temp_var = {:x, [], Kernel}
assert expand_once_and_clean(quote(do: Kernel.||(1, false)), __ENV__) == (quote context: Kernel do
case 1 do
unquote(temp_var) when unquote(temp_var) in [false, nil] -> false
@@ -217,7 +217,7 @@ defmodule MacroTest do
end
test :expand do
- temp_var = { :x, [], Kernel }
+ temp_var = {:x, [], Kernel}
assert expand_and_clean(quote(do: oror(1, false)), __ENV__) == (quote context: Kernel do
case 1 do
unquote(temp_var) when unquote(temp_var) in [false, nil] -> false
@@ -353,7 +353,7 @@ defmodule MacroTest do
test :containers_to_string do
assert Macro.to_string(quote do: {}) == "{}"
assert Macro.to_string(quote do: []) == "[]"
- assert Macro.to_string(quote do: { 1, 2, 3 }) == "{1, 2, 3}"
+ assert Macro.to_string(quote do: {1, 2, 3}) == "{1, 2, 3}"
assert Macro.to_string(quote do: [ 1, 2, 3 ]) == "[1, 2, 3]"
assert Macro.to_string(quote do: %{}) == "%{}"
assert Macro.to_string(quote do: %{:foo => :bar}) == "%{foo: :bar}"
@@ -413,9 +413,9 @@ defmodule MacroTest do
assert Macro.to_string(quote do: foo(x: y, p: q)) == "foo(x: y, p: q)"
assert Macro.to_string(quote do: foo(a, x: y, p: q)) == "foo(a, x: y, p: q)"
- assert Macro.to_string(quote do: { [] }) == "{[]}"
- assert Macro.to_string(quote do: { [a: b] }) == "{[a: b]}"
- assert Macro.to_string(quote do: { x, a: b }) == "{x, [a: b]}"
+ assert Macro.to_string(quote do: {[]}) == "{[]}"
+ assert Macro.to_string(quote do: {[a: b]}) == "{[a: b]}"
+ assert Macro.to_string(quote do: {x, a: b}) == "{x, [a: b]}"
end
test :to_string_with_fun do
@@ -445,22 +445,22 @@ defmodule MacroTest do
end
test :unsafe_terms do
- assert Macro.safe_term(quote do: 1+1) == { :unsafe, quote do: 1 + 1 }
- assert Macro.safe_term(quote do: [1+1]) == { :unsafe, quote do: 1 + 1 }
- assert Macro.safe_term(quote do: {1+1}) == { :unsafe, quote do: 1 + 1 }
- assert Macro.safe_term(quote do: %{a: 1+1}) == { :unsafe, quote do: 1 + 1 }
+ assert Macro.safe_term(quote do: 1+1) == {:unsafe, quote do: 1 + 1}
+ assert Macro.safe_term(quote do: [1+1]) == {:unsafe, quote do: 1 + 1}
+ assert Macro.safe_term(quote do: {1+1}) == {:unsafe, quote do: 1 + 1}
+ assert Macro.safe_term(quote do: %{a: 1+1}) == {:unsafe, quote do: 1 + 1}
end
## decompose_call
test :decompose_call do
- assert Macro.decompose_call(quote do: foo) == { :foo, [] }
- assert Macro.decompose_call(quote do: foo()) == { :foo, [] }
- assert Macro.decompose_call(quote do: foo(1, 2, 3)) == { :foo, [1, 2, 3] }
+ assert Macro.decompose_call(quote do: foo) == {:foo, []}
+ assert Macro.decompose_call(quote do: foo()) == {:foo, []}
+ assert Macro.decompose_call(quote do: foo(1, 2, 3)) == {:foo, [1, 2, 3]}
assert Macro.decompose_call(quote do: M.N.foo(1, 2, 3)) ==
- { { :__aliases__, [alias: false], [:M, :N] }, :foo, [1, 2, 3] }
+ {{:__aliases__, [alias: false], [:M, :N]}, :foo, [1, 2, 3]}
assert Macro.decompose_call(quote do: :foo.foo(1, 2, 3)) ==
- { :foo, :foo, [1, 2, 3] }
+ {:foo, :foo, [1, 2, 3]}
assert Macro.decompose_call(quote do: 1.(1, 2, 3)) == :error
assert Macro.decompose_call(quote do: "some string") == :error
end
@@ -469,11 +469,11 @@ defmodule MacroTest do
test :env_stacktrace do
env = __ENV__.file("foo").line(12)
- assert env.stacktrace == [{ __MODULE__, :test_env_stacktrace, 1, [file: "foo", line: 12] }]
+ assert env.stacktrace == [{__MODULE__, :test_env_stacktrace, 1, [file: "foo", line: 12]}]
env = env.function(nil)
- assert env.stacktrace == [{ __MODULE__, :__MODULE__, 0, [file: "foo", line: 12] }]
+ assert env.stacktrace == [{__MODULE__, :__MODULE__, 0, [file: "foo", line: 12]}]
env = env.module(nil)
- assert env.stacktrace == [{ :elixir_compiler, :__FILE__, 1, [file: "foo", line: 12] }]
+ assert env.stacktrace == [{:elixir_compiler, :__FILE__, 1, [file: "foo", line: 12]}]
end
test :context_modules do
diff --git a/lib/elixir/test/elixir/map_test.exs b/lib/elixir/test/elixir/map_test.exs
index d50d0fb9e..0d0223d44 100644
--- a/lib/elixir/test/elixir/map_test.exs
+++ b/lib/elixir/test/elixir/map_test.exs
@@ -8,55 +8,55 @@ defmodule MapTest do
end
defp two_items_map do
- %{ a: 1, b: 2 }
+ %{a: 1, b: 2}
end
- @map %{ a: 1, b: 2 }
+ @map %{a: 1, b: 2}
test "maps in attributes" do
- assert @map == %{ a: 1, b: 2 }
+ assert @map == %{a: 1, b: 2}
end
test "maps when quoted" do
assert (quote do
- %{ foo: 1 }
- end) == { :%{}, [], [{ :foo, 1 }] }
+ %{foo: 1}
+ end) == {:%{}, [], [{:foo, 1}]}
assert (quote do
%
- { foo: 1 }
- end) == { :%{}, [], [{ :foo, 1 }] }
+ {foo: 1}
+ end) == {:%{}, [], [{:foo, 1}]}
end
test "structs when quoted" do
assert (quote do
- %User{ foo: 1 }
- end) == { :%, [], [
- { :__aliases__, [alias: false], [:User] },
- { :%{}, [], [{ :foo, 1 }] }
- ] }
+ %User{foo: 1}
+ end) == {:%, [], [
+ {:__aliases__, [alias: false], [:User]},
+ {:%{}, [], [{:foo, 1}]}
+ ]}
assert (quote do
%
- User{ foo: 1 }
- end) == { :%, [], [
- { :__aliases__, [alias: false], [:User] },
- { :%{}, [], [{ :foo, 1 }] }
- ] }
+ User{foo: 1}
+ end) == {:%, [], [
+ {:__aliases__, [alias: false], [:User]},
+ {:%{}, [], [{:foo, 1}]}
+ ]}
assert (quote do
- %unquote(User){ foo: 1 }
- end) == { :%, [], [User, { :%{}, [], [{ :foo, 1 }] }] }
+ %unquote(User){foo: 1}
+ end) == {:%, [], [User, {:%{}, [], [{:foo, 1}]}]}
end
test "maps keywords and atoms" do
- assert [%{}: :%] == [{ :%{}, :% }]
- assert [%: :%{}] == [{ :%, :%{} }]
+ assert [%{}: :%] == [{:%{}, :%}]
+ assert [%: :%{}] == [{:%, :%{}}]
end
test "maps with variables" do
a = 0
- assert %{ a: a = 1, b: a } == %{ a: 1, b: 0 }
+ assert %{a: a = 1, b: a} == %{a: 1, b: 0}
assert a == 1
end
@@ -71,22 +71,22 @@ defmodule MapTest do
end
test "maps with optional comma" do
- assert %{ a: :b, } == %{ a: :b }
- assert %{ 1 => 2, } == %{ 1 => 2 }
- assert %{ 1 => 2, a: :b, } == %{ 1 => 2, a: :b }
+ assert %{a: :b,} == %{a: :b}
+ assert %{1 => 2,} == %{1 => 2}
+ assert %{1 => 2, a: :b,} == %{1 => 2, a: :b}
end
test "maps with duplicate keys" do
- assert %{ a: :b, a: :c } == %{ a: :c }
- assert %{ 1 => 2, 1 => 3 } == %{ 1 => 3 }
- assert %{ :a => :b, a: :c } == %{ a: :c }
+ assert %{a: :b, a: :c} == %{a: :c}
+ assert %{1 => 2, 1 => 3} == %{1 => 3}
+ assert %{:a => :b, a: :c} == %{a: :c}
end
test "update maps" do
- assert %{ two_items_map | a: 3 } == %{ a: 3, b: 2 }
+ assert %{two_items_map | a: 3} == %{a: 3, b: 2}
assert_raise ArgumentError, fn ->
- %{ two_items_map | c: 3 }
+ %{two_items_map | c: 3}
end
end
@@ -100,27 +100,27 @@ defmodule MapTest do
defmodule ExternalUser do
def __struct__ do
- %{ __struct__: ThisDoesNotLeak, name: "josé", age: 27 }
+ %{__struct__: ThisDoesNotLeak, name: "josé", age: 27}
end
end
test "structs" do
assert %ExternalUser{} ==
- %{ __struct__: ExternalUser, name: "josé", age: 27 }
+ %{__struct__: ExternalUser, name: "josé", age: 27}
assert %ExternalUser{name: "valim"} ==
- %{ __struct__: ExternalUser, name: "valim", age: 27 }
+ %{__struct__: ExternalUser, name: "valim", age: 27}
user = %ExternalUser{}
- assert %ExternalUser{ user | name: "valim" } ==
- %{ __struct__: ExternalUser, name: "valim", age: 27 }
+ assert %ExternalUser{user | name: "valim"} ==
+ %{__struct__: ExternalUser, name: "valim", age: 27}
%ExternalUser{name: name} = %ExternalUser{}
assert name == "josé"
map = %{}
assert_raise BadStructError, "expected a struct named MapTest.ExternalUser, got: %{}", fn ->
- %ExternalUser{ map | name: "valim" }
+ %ExternalUser{map | name: "valim"}
end
end
@@ -143,8 +143,8 @@ defmodule MapTest do
end
test "local user" do
- assert LocalUser.new == %LocalUser{ name: "josé", nested: %LocalUser.NestedUser{} }
- assert LocalUser.Context.new == %LocalUser{ name: "josé", nested: %LocalUser.NestedUser{} }
+ assert LocalUser.new == %LocalUser{name: "josé", nested: %LocalUser.NestedUser{}}
+ assert LocalUser.Context.new == %LocalUser{name: "josé", nested: %LocalUser.NestedUser{}}
end
defmodule NilUser do
@@ -152,6 +152,6 @@ defmodule MapTest do
end
test "nil user" do
- assert %NilUser{} == %{ __struct__: NilUser, name: nil, contents: %{} }
+ assert %NilUser{} == %{__struct__: NilUser, name: nil, contents: %{}}
end
end
diff --git a/lib/elixir/test/elixir/module/locals_tracker_test.exs b/lib/elixir/test/elixir/module/locals_tracker_test.exs
index 7229b8546..26389570c 100644
--- a/lib/elixir/test/elixir/module/locals_tracker_test.exs
+++ b/lib/elixir/test/elixir/module/locals_tracker_test.exs
@@ -6,7 +6,7 @@ defmodule Module.LocalsTrackerTest do
alias Module.LocalsTracker, as: D
setup do
- { :ok, [pid: D.start_link] }
+ {:ok, [pid: D.start_link]}
end
teardown config do
@@ -17,88 +17,88 @@ defmodule Module.LocalsTrackerTest do
## Locals
test "can add definitions", config do
- D.add_definition(config[:pid], :def, { :foo, 1 })
- D.add_definition(config[:pid], :defp, { :bar, 1 })
+ D.add_definition(config[:pid], :def, {:foo, 1})
+ D.add_definition(config[:pid], :defp, {:bar, 1})
end
test "can add locals", config do
- D.add_definition(config[:pid], :def, { :foo, 1 })
- D.add_local(config[:pid], { :foo, 1 }, { :bar, 1 })
+ D.add_definition(config[:pid], :def, {:foo, 1})
+ D.add_local(config[:pid], {:foo, 1}, {:bar, 1})
end
test "public definitions are always reachable", config do
- D.add_definition(config[:pid], :def, { :public, 1 })
- assert { :public, 1 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :def, {:public, 1})
+ assert {:public, 1} in D.reachable(config[:pid])
- D.add_definition(config[:pid], :defmacro, { :public, 2 })
- assert { :public, 2 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :defmacro, {:public, 2})
+ assert {:public, 2} in D.reachable(config[:pid])
end
test "private definitions are never reachable", config do
- D.add_definition(config[:pid], :defp, { :private, 1 })
- refute { :private, 1 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :defp, {:private, 1})
+ refute {:private, 1} in D.reachable(config[:pid])
- D.add_definition(config[:pid], :defmacrop, { :private, 2 })
- refute { :private, 2 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :defmacrop, {:private, 2})
+ refute {:private, 2} in D.reachable(config[:pid])
end
test "private definitions are reachable when connected to local", config do
- D.add_definition(config[:pid], :defp, { :private, 1 })
- refute { :private, 1 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :defp, {:private, 1})
+ refute {:private, 1} in D.reachable(config[:pid])
- D.add_local(config[:pid], { :private, 1 })
- assert { :private, 1 } in D.reachable(config[:pid])
+ D.add_local(config[:pid], {:private, 1})
+ assert {:private, 1} in D.reachable(config[:pid])
end
test "private definitions are reachable when connected through a public one", config do
- D.add_definition(config[:pid], :defp, { :private, 1 })
- refute { :private, 1 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :defp, {:private, 1})
+ refute {:private, 1} in D.reachable(config[:pid])
- D.add_definition(config[:pid], :def, { :public, 1 })
- D.add_local(config[:pid], { :public, 1 }, { :private, 1 })
- assert { :private, 1 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :def, {:public, 1})
+ D.add_local(config[:pid], {:public, 1}, {:private, 1})
+ assert {:private, 1} in D.reachable(config[:pid])
end
@unused [
- { { :private, 1 }, :defp, 0 }
+ {{:private, 1}, :defp, 0}
]
test "unused private definitions are marked as so", config do
- D.add_definition(config[:pid], :def, { :public, 1 })
+ D.add_definition(config[:pid], :def, {:public, 1})
unused = D.collect_unused_locals(config[:pid], @unused)
- assert unused == [{ :unused_def, { :private, 1 }, :defp }]
+ assert unused == [{:unused_def, {:private, 1}, :defp}]
- D.add_local(config[:pid], { :public, 1 }, { :private, 1 })
+ D.add_local(config[:pid], {:public, 1}, {:private, 1})
unused = D.collect_unused_locals(config[:pid], @unused)
- refute unused == [{ :unused_def, { :private, 1 }, :defp }]
+ refute unused == [{:unused_def, {:private, 1}, :defp}]
end
@unused [
- { { :private, 3 }, :defp, 3 }
+ {{:private, 3}, :defp, 3}
]
test "private definitions with unused default arguments", config do
- D.add_definition(config[:pid], :def, { :public, 1 })
+ D.add_definition(config[:pid], :def, {:public, 1})
unused = D.collect_unused_locals(config[:pid], @unused)
- assert unused == [{ :unused_def, { :private, 3 }, :defp }]
+ assert unused == [{:unused_def, {:private, 3}, :defp}]
- D.add_local(config[:pid], { :public, 1 }, { :private, 3 })
+ D.add_local(config[:pid], {:public, 1}, {:private, 3})
unused = D.collect_unused_locals(config[:pid], @unused)
- assert unused == [{ :unused_args, { :private, 3 }}]
+ assert unused == [{:unused_args, {:private, 3}}]
end
test "private definitions with some unused default arguments", config do
- D.add_definition(config[:pid], :def, { :public, 1 })
- D.add_local(config[:pid], { :public, 1 }, { :private, 1 })
+ D.add_definition(config[:pid], :def, {:public, 1})
+ D.add_local(config[:pid], {:public, 1}, {:private, 1})
unused = D.collect_unused_locals(config[:pid], @unused)
- assert unused == [{ :unused_args, { :private, 3 }, 1}]
+ assert unused == [{:unused_args, {:private, 3}, 1}]
end
test "private definitions with all used default arguments", config do
- D.add_definition(config[:pid], :def, { :public, 1 })
- D.add_local(config[:pid], { :public, 1 }, { :private, 0 })
+ D.add_definition(config[:pid], :def, {:public, 1})
+ D.add_local(config[:pid], {:public, 1}, {:private, 0})
unused = D.collect_unused_locals(config[:pid], @unused)
assert unused == []
end
@@ -106,50 +106,50 @@ defmodule Module.LocalsTrackerTest do
## Defaults
test "can add defaults", config do
- D.add_definition(config[:pid], :def, { :foo, 4 })
- D.add_defaults(config[:pid], :def, { :foo, 4 }, 2)
+ D.add_definition(config[:pid], :def, {:foo, 4})
+ D.add_defaults(config[:pid], :def, {:foo, 4}, 2)
end
test "defaults are reachable if public", config do
- D.add_definition(config[:pid], :def, { :foo, 4 })
- D.add_defaults(config[:pid], :def, { :foo, 4 }, 2)
- assert { :foo, 2 } in D.reachable(config[:pid])
- assert { :foo, 3 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :def, {:foo, 4})
+ D.add_defaults(config[:pid], :def, {:foo, 4}, 2)
+ assert {:foo, 2} in D.reachable(config[:pid])
+ assert {:foo, 3} in D.reachable(config[:pid])
end
test "defaults are not reachable if private", config do
- D.add_definition(config[:pid], :defp, { :foo, 4 })
- D.add_defaults(config[:pid], :defp, { :foo, 4 }, 2)
- refute { :foo, 2 } in D.reachable(config[:pid])
- refute { :foo, 3 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :defp, {:foo, 4})
+ D.add_defaults(config[:pid], :defp, {:foo, 4}, 2)
+ refute {:foo, 2} in D.reachable(config[:pid])
+ refute {:foo, 3} in D.reachable(config[:pid])
end
test "defaults are connected", config do
- D.add_definition(config[:pid], :defp, { :foo, 4 })
- D.add_defaults(config[:pid], :defp, { :foo, 4 }, 2)
- D.add_local(config[:pid], { :foo, 2 })
- assert { :foo, 2 } in D.reachable(config[:pid])
- assert { :foo, 3 } in D.reachable(config[:pid])
- assert { :foo, 4 } in D.reachable(config[:pid])
+ D.add_definition(config[:pid], :defp, {:foo, 4})
+ D.add_defaults(config[:pid], :defp, {:foo, 4}, 2)
+ D.add_local(config[:pid], {:foo, 2})
+ assert {:foo, 2} in D.reachable(config[:pid])
+ assert {:foo, 3} in D.reachable(config[:pid])
+ assert {:foo, 4} in D.reachable(config[:pid])
end
## Imports
test "find imports from dispatch", config do
- D.add_import(config[:pid], nil, Module, { :concat, 1 })
- assert Module in D.imports_with_dispatch(config[:pid], { :concat, 1 })
- refute Module in D.imports_with_dispatch(config[:pid], { :unknown, 1 })
+ D.add_import(config[:pid], nil, Module, {:concat, 1})
+ assert Module in D.imports_with_dispatch(config[:pid], {:concat, 1})
+ refute Module in D.imports_with_dispatch(config[:pid], {:unknown, 1})
end
test "find import conflicts", config do
- refute { [Module], :conflict, 1 } in D.collect_imports_conflicts(config[:pid], [conflict: 1])
+ refute {[Module], :conflict, 1} in D.collect_imports_conflicts(config[:pid], [conflict: 1])
# Calls outside local functions are not triggered
- D.add_import(config[:pid], nil, Module, { :conflict, 1 })
- refute { [Module], :conflict, 1 } in D.collect_imports_conflicts(config[:pid], [conflict: 1])
+ D.add_import(config[:pid], nil, Module, {:conflict, 1})
+ refute {[Module], :conflict, 1} in D.collect_imports_conflicts(config[:pid], [conflict: 1])
- D.add_local(config[:pid], { :foo, 2 })
- D.add_import(config[:pid], { :foo, 2 }, Module, { :conflict, 1 })
- assert { [Module], :conflict, 1 } in D.collect_imports_conflicts(config[:pid], [conflict: 1])
+ D.add_local(config[:pid], {:foo, 2})
+ D.add_import(config[:pid], {:foo, 2}, Module, {:conflict, 1})
+ assert {[Module], :conflict, 1} in D.collect_imports_conflicts(config[:pid], [conflict: 1])
end
end
diff --git a/lib/elixir/test/elixir/module_test.exs b/lib/elixir/test/elixir/module_test.exs
index 2d4f8616d..02cc8dc80 100644
--- a/lib/elixir/test/elixir/module_test.exs
+++ b/lib/elixir/test/elixir/module_test.exs
@@ -8,7 +8,7 @@ defmodule ModuleTest.ToBeUsed do
Module.put_attribute(target, :has_callback, false)
Module.put_attribute(target, :before_compile, __MODULE__)
Module.put_attribute(target, :after_compile, __MODULE__)
- Module.put_attribute(target, :before_compile, { __MODULE__, :callback })
+ Module.put_attribute(target, :before_compile, {__MODULE__, :callback})
quote do: (def line, do: __ENV__.line)
end
@@ -41,7 +41,7 @@ defmodule ModuleTest do
@register_example :it_works
@register_example :still_works
- contents = quote do: (def eval_quoted_info, do: { __MODULE__, __ENV__.file, __ENV__.line })
+ contents = quote do: (def eval_quoted_info, do: {__MODULE__, __ENV__.file, __ENV__.line})
Module.eval_quoted __MODULE__, contents, [], file: "sample.ex", line: 13
defmacrop in_module(block) do
@@ -55,7 +55,7 @@ defmodule ModuleTest do
## Eval
test :eval_quoted do
- assert eval_quoted_info() == { ModuleTest, "sample.ex", 13 }
+ assert eval_quoted_info() == {ModuleTest, "sample.ex", 13}
end
test :line_from_macro do
@@ -90,7 +90,7 @@ defmodule ModuleTest do
assert env.module == ModuleTest.OnDefinition
assert kind == :def
assert name == :hello
- assert [{ :foo, _, _ }, { :bar, _ , _ }] = args
+ assert [{:foo, _, _}, {:bar, _ , _}] = args
assert [] = guards
assert {{:., _, [:erlang, :+]}, _, [{:foo, _, nil}, {:bar, _, nil}]} = expr
end
@@ -125,7 +125,7 @@ defmodule ModuleTest do
test :registered_attributes do
assert [{:register_example, [:it_works]}, {:register_example, [:still_works]}] ==
- Enum.filter __MODULE__.__info__(:attributes), &match?({ :register_example, _ }, &1)
+ Enum.filter __MODULE__.__info__(:attributes), &match?({:register_example, _}, &1)
end
@some_attribute [1]
@@ -168,13 +168,13 @@ defmodule ModuleTest do
## Creation
test :defmodule do
- assert match?({ :module, Defmodule, binary, 3 } when is_binary(binary), defmodule Defmodule do
+ assert match?({:module, Defmodule, binary, 3} when is_binary(binary), defmodule Defmodule do
1 + 2
end)
end
test :defmodule_with_atom do
- assert match?({ :module, :root_defmodule, _, _ }, defmodule :root_defmodule do
+ assert match?({:module, :root_defmodule, _, _}, defmodule :root_defmodule do
:ok
end)
end
@@ -184,7 +184,7 @@ defmodule ModuleTest do
quote do
def world, do: true
end
- { :module, ModuleCreateSample, _, _ } =
+ {:module, ModuleCreateSample, _, _} =
Module.create(ModuleCreateSample, contents, __ENV__)
assert ModuleCreateSample.world
end
@@ -199,18 +199,18 @@ defmodule ModuleTest do
test :defines? do
in_module do
- refute Module.defines? __MODULE__, { :foo, 0 }
+ refute Module.defines? __MODULE__, {:foo, 0}
def foo(), do: bar()
- assert Module.defines? __MODULE__, { :foo, 0 }
- assert Module.defines? __MODULE__, { :foo, 0 }, :def
+ assert Module.defines? __MODULE__, {:foo, 0}
+ assert Module.defines? __MODULE__, {:foo, 0}, :def
- refute Module.defines? __MODULE__, { :bar, 0 }, :defp
+ refute Module.defines? __MODULE__, {:bar, 0}, :defp
defp bar(), do: :ok
- assert Module.defines? __MODULE__, { :bar, 0 }, :defp
+ assert Module.defines? __MODULE__, {:bar, 0}, :defp
- refute Module.defines? __MODULE__, { :baz, 0 }, :defmacro
+ refute Module.defines? __MODULE__, {:baz, 0}, :defmacro
defmacro baz(), do: :ok
- assert Module.defines? __MODULE__, { :baz, 0 }, :defmacro
+ assert Module.defines? __MODULE__, {:baz, 0}, :defmacro
end
end
diff --git a/lib/elixir/test/elixir/option_parser_test.exs b/lib/elixir/test/elixir/option_parser_test.exs
index 5765925a6..4361e0fa9 100644
--- a/lib/elixir/test/elixir/option_parser_test.exs
+++ b/lib/elixir/test/elixir/option_parser_test.exs
@@ -4,121 +4,121 @@ defmodule OptionParserTest do
use ExUnit.Case, async: true
test "parses boolean option" do
- assert OptionParser.parse(["--docs"]) == { [docs: true], [], [] }
+ assert OptionParser.parse(["--docs"]) == {[docs: true], [], []}
end
test "parses alias boolean option as the alias key" do
- assert OptionParser.parse(["-d"], aliases: [d: :docs]) == { [docs: true], [], [] }
+ assert OptionParser.parse(["-d"], aliases: [d: :docs]) == {[docs: true], [], []}
end
test "parses more than one boolean option" do
- assert OptionParser.parse(["--docs", "--compile"]) == { [docs: true, compile: true], [], [] }
+ assert OptionParser.parse(["--docs", "--compile"]) == {[docs: true, compile: true], [], []}
end
test "parses more than one boolean options as the alias" do
- assert OptionParser.parse(["-d", "--compile"], aliases: [d: :docs]) == { [docs: true, compile: true], [], [] }
+ assert OptionParser.parse(["-d", "--compile"], aliases: [d: :docs]) == {[docs: true, compile: true], [], []}
end
test "parses --key value option" do
- assert OptionParser.parse(["--source", "form_docs/"]) == { [source: "form_docs/"], [], [] }
+ assert OptionParser.parse(["--source", "form_docs/"]) == {[source: "form_docs/"], [], []}
end
test "parses --key=value option" do
- assert OptionParser.parse(["--source=form_docs/", "other"]) == { [source: "form_docs/"], ["other"], [] }
+ assert OptionParser.parse(["--source=form_docs/", "other"]) == {[source: "form_docs/"], ["other"], []}
end
test "parses alias --key value option as the alias" do
assert OptionParser.parse(["-s", "from_docs/"], aliases: [s: :source]) ==
- { [source: "from_docs/"], [], [] }
+ {[source: "from_docs/"], [], []}
end
test "parses alias --key=value option as the alias" do
assert OptionParser.parse(["-s=from_docs/", "other"], aliases: [s: :source]) ==
- { [source: "from_docs/"], ["other"], [] }
+ {[source: "from_docs/"], ["other"], []}
end
test "does not parse -- as an alias" do
assert OptionParser.parse(["--s=from_docs/"], aliases: [s: :source]) ==
- { [s: "from_docs/"], [], [] }
+ {[s: "from_docs/"], [], []}
end
test "does not parse - as a switch" do
assert OptionParser.parse(["-source=from_docs/"], aliases: [s: :source]) ==
- { [], [], [source: "from_docs/"] }
+ {[], [], [source: "from_docs/"]}
end
test "parses configured booleans" do
- assert OptionParser.parse(["--docs=false"], switches: [docs: :boolean]) == { [docs: false], [], [] }
- assert OptionParser.parse(["--docs=true"], switches: [docs: :boolean]) == { [docs: true], [], [] }
- assert OptionParser.parse(["--docs=other"], switches: [docs: :boolean]) == { [docs: false], [], [] }
+ assert OptionParser.parse(["--docs=false"], switches: [docs: :boolean]) == {[docs: false], [], []}
+ assert OptionParser.parse(["--docs=true"], switches: [docs: :boolean]) == {[docs: true], [], []}
+ assert OptionParser.parse(["--docs=other"], switches: [docs: :boolean]) == {[docs: false], [], []}
- assert OptionParser.parse(["--docs", "foo"], switches: [docs: :boolean]) == { [docs: true], ["foo"], [] }
- assert OptionParser.parse(["--docs", "foo"], switches: [docs: :boolean]) == { [docs: true], ["foo"], [] }
- assert OptionParser.parse(["--no-docs", "foo"], switches: [docs: :boolean]) == { [docs: false], ["foo"], [] }
+ assert OptionParser.parse(["--docs", "foo"], switches: [docs: :boolean]) == {[docs: true], ["foo"], []}
+ assert OptionParser.parse(["--docs", "foo"], switches: [docs: :boolean]) == {[docs: true], ["foo"], []}
+ assert OptionParser.parse(["--no-docs", "foo"], switches: [docs: :boolean]) == {[docs: false], ["foo"], []}
end
test "does not set unparsed booleans" do
- assert OptionParser.parse(["foo"], switches: [docs: :boolean]) == { [], ["foo"], [] }
+ assert OptionParser.parse(["foo"], switches: [docs: :boolean]) == {[], ["foo"], []}
end
test "keeps options on configured keep" do
assert OptionParser.parse(["--require", "foo", "--require", "bar", "baz"], switches: [require: :keep]) ==
- { [require: "foo", require: "bar"], ["baz"], [] }
+ {[require: "foo", require: "bar"], ["baz"], []}
end
test "parses configured strings" do
- assert OptionParser.parse(["--value", "1", "foo"], switches: [value: :string]) == { [value: "1"], ["foo"], [] }
+ assert OptionParser.parse(["--value", "1", "foo"], switches: [value: :string]) == {[value: "1"], ["foo"], []}
end
test "parses configured integers" do
- assert OptionParser.parse(["--value", "1", "foo"], switches: [value: :integer]) == { [value: 1], ["foo"], [] }
- assert OptionParser.parse(["--value", "WAT", "foo"], switches: [value: :integer]) == { [], ["foo"], [value: "WAT"] }
+ assert OptionParser.parse(["--value", "1", "foo"], switches: [value: :integer]) == {[value: 1], ["foo"], []}
+ assert OptionParser.parse(["--value", "WAT", "foo"], switches: [value: :integer]) == {[], ["foo"], [value: "WAT"]}
end
test "parses configured integers with keep" do
assert OptionParser.parse(["--value", "1", "--value", "2", "foo"],
- switches: [value: [:integer, :keep]]) == { [value: 1, value: 2], ["foo"], [] }
+ switches: [value: [:integer, :keep]]) == {[value: 1, value: 2], ["foo"], []}
end
test "parses configured floats" do
- assert OptionParser.parse(["--value", "1.0", "foo"], switches: [value: :float]) == { [value: 1.0], ["foo"], [] }
- assert OptionParser.parse(["--value", "WAT", "foo"], switches: [value: :float]) == { [], ["foo"], [value: "WAT"] }
+ assert OptionParser.parse(["--value", "1.0", "foo"], switches: [value: :float]) == {[value: 1.0], ["foo"], []}
+ assert OptionParser.parse(["--value", "WAT", "foo"], switches: [value: :float]) == {[], ["foo"], [value: "WAT"]}
end
test "parses no switches as flags" do
- assert OptionParser.parse(["--no-docs", "foo"]) == { [no_docs: true], ["foo"], [] }
+ assert OptionParser.parse(["--no-docs", "foo"]) == {[no_docs: true], ["foo"], []}
end
test "parses more than one key/value options" do
assert OptionParser.parse(["--source", "from_docs/", "--docs", "show"]) ==
- { [source: "from_docs/", docs: "show"], [], [] }
+ {[source: "from_docs/", docs: "show"], [], []}
end
test "overrides options by default" do
assert OptionParser.parse(["--require", "foo", "--require", "bar", "baz"]) ==
- { [require: "bar"], ["baz"], [] }
+ {[require: "bar"], ["baz"], []}
end
test "parses mixed options" do
options = OptionParser.parse(["--source", "from_docs/", "--compile", "-x"], aliases: [x: :x])
- assert options == { [source: "from_docs/", compile: true, x: true], [], [] }
+ assert options == {[source: "from_docs/", compile: true, x: true], [], []}
end
test "stops on first non option arguments" do
options = OptionParser.parse_head(["--source", "from_docs/", "test/enum_test.exs", "--verbose"])
- assert options == { [source: "from_docs/"], ["test/enum_test.exs", "--verbose"], [] }
+ assert options == {[source: "from_docs/"], ["test/enum_test.exs", "--verbose"], []}
end
test "stops on --" do
options = OptionParser.parse(["--source", "from_docs/", "--", "1", "2", "3"])
- assert options == { [source: "from_docs/"], ["--", "1", "2", "3"], [] }
+ assert options == {[source: "from_docs/"], ["--", "1", "2", "3"], []}
options = OptionParser.parse_head(["--source", "from_docs/", "--", "1", "2", "3"])
- assert options == { [source: "from_docs/"], ["--", "1", "2", "3"], [] }
+ assert options == {[source: "from_docs/"], ["--", "1", "2", "3"], []}
end
test "goes beyond the first non option arguments" do
options = OptionParser.parse(["--source", "from_docs/", "test/enum_test.exs", "--verbose"])
- assert options == { [source: "from_docs/", verbose: true], ["test/enum_test.exs"], [] }
+ assert options == {[source: "from_docs/", verbose: true], ["test/enum_test.exs"], []}
end
end
diff --git a/lib/elixir/test/elixir/process_test.exs b/lib/elixir/test/elixir/process_test.exs
index e3e84a888..363d570ad 100644
--- a/lib/elixir/test/elixir/process_test.exs
+++ b/lib/elixir/test/elixir/process_test.exs
@@ -22,7 +22,7 @@ defmodule ProcessTest do
end
defp expand(expr, env) do
- { expr, _env } = :elixir_exp.expand(expr, :elixir_env.ex_to_env(env))
+ {expr, _env} = :elixir_exp.expand(expr, :elixir_env.ex_to_env(env))
expr
end
end
diff --git a/lib/elixir/test/elixir/protocol_test.exs b/lib/elixir/test/elixir/protocol_test.exs
index 47d1969d0..0aad69deb 100644
--- a/lib/elixir/test/elixir/protocol_test.exs
+++ b/lib/elixir/test/elixir/protocol_test.exs
@@ -52,13 +52,13 @@ defmodule ProtocolTest do
defmodule NoImplStruct do
def __struct__ do
- %{ a: 0, b: 0 }
+ %{a: 0, b: 0}
end
end
defmodule ImplStruct do
def __struct__ do
- %{ a: 0, b: 0 }
+ %{a: 0, b: 0}
end
defimpl Sample do
@@ -104,7 +104,7 @@ defmodule ProtocolTest do
test "protocol implementation with any and records fallback" do
assert WithAny.impl_for(ImplRec[]) == WithAny.ProtocolTest.ImplRec
assert WithAny.impl_for(NoImplRec[]) == WithAny.Tuple
- assert WithAny.impl_for({ :foo }) == WithAny.Tuple
+ assert WithAny.impl_for({:foo}) == WithAny.Tuple
assert WithAny.impl_for({}) == WithAny.Tuple
assert WithAny.impl_for(self) == WithAny.Any
end
@@ -112,7 +112,7 @@ defmodule ProtocolTest do
test "protocol implementation with any and structs fallback" do
assert WithAny.impl_for(%ImplStruct{}) == WithAny.ProtocolTest.ImplStruct
assert WithAny.impl_for(%NoImplStruct{}) == WithAny.Any
- assert WithAny.impl_for(%{ __struct__: "foo" }) == WithAny.Map
+ assert WithAny.impl_for(%{__struct__: "foo"}) == WithAny.Map
assert WithAny.impl_for(%{}) == WithAny.Map
assert WithAny.impl_for(self) == WithAny.Any
end
@@ -125,8 +125,8 @@ defmodule ProtocolTest do
test "protocol documentation" do
docs = Sample.__info__(:docs)
- assert { { :ok, 1 }, _, :def, [{ :thing, _, nil }], "Ok" } =
- List.keyfind(docs, { :ok, 1 }, 0)
+ assert {{:ok, 1}, _, :def, [{:thing, _, nil}], "Ok"} =
+ List.keyfind(docs, {:ok, 1}, 0)
end
test "protocol keeps underlying UndefinedFunctionError" do
@@ -155,11 +155,11 @@ defmodule ProtocolTest do
defimpl Attribute, for: ImplStruct do
def test(_) do
- { @protocol, @for }
+ {@protocol, @for}
end
end
- assert Attribute.test(%ImplStruct{}) == { Attribute, ImplStruct }
+ assert Attribute.test(%ImplStruct{}) == {Attribute, ImplStruct}
assert Attribute.ProtocolTest.ImplStruct.__impl__(:protocol) == Attribute
assert Attribute.ProtocolTest.ImplStruct.__impl__(:for) == ImplStruct
assert Attribute.ProtocolTest.ImplStruct.__info__(:attributes)[:impl] ==
@@ -180,8 +180,8 @@ defmodule ProtocolTest do
end
defp get_callbacks(module, name, arity) do
- callbacks = for { :callback, info } <- module.__info__(:attributes), do: hd(info)
- List.keyfind(callbacks, { name, arity }, 0) |> elem(1)
+ callbacks = for {:callback, info} <- module.__info__(:attributes), do: hd(info)
+ List.keyfind(callbacks, {name, arity}, 0) |> elem(1)
end
end
@@ -191,7 +191,7 @@ end
# path = Path.expand("../ebin", __DIR__)
# File.mkdir_p!(path)
#
-# compile = fn { :module, module, binary, _ } ->
+# compile = fn {:module, module, binary, _} ->
# :code.purge(module)
# :code.delete(module)
# File.write!("#{path}/#{module}.beam", binary)
@@ -219,24 +219,24 @@ end
#
# defmodule NoImplStruct do
# def __struct__ do
-# %{ a: 0, b: 0 }
+# %{a: 0, b: 0}
# end
# end
#
# defmodule ImplStruct do
# def __struct__ do
-# %{ a: 0, b: 0 }
+# %{a: 0, b: 0}
# end
# end
#
# Code.append_path(path)
#
# # Any is ignored because there is no fallback
-# { :ok, binary } = Protocol.Consolidation.apply_to(Sample, [Any, ImplStruct])
+# {:ok, binary} = Protocol.Consolidation.apply_to(Sample, [Any, ImplStruct])
# :code.load_binary(Sample, 'protocol_test.exs', binary)
#
# # Any should be moved to the end
-# { :ok, binary } = Protocol.Consolidation.apply_to(WithAny, [Any, ImplStruct, Map])
+# {:ok, binary} = Protocol.Consolidation.apply_to(WithAny, [Any, ImplStruct, Map])
# :code.load_binary(WithAny, 'protocol_test.exs', binary)
#
# test "consolidated implementations without any" do
@@ -265,26 +265,26 @@ end
# test "consolidated implementations with any and tuple fallback" do
# assert WithAny.impl_for(%ImplStruct{}) == WithAny.Protocol.ConsolidationTest.ImplStruct
# assert WithAny.impl_for(%NoImplStruct{}) == WithAny.Any
-# assert WithAny.impl_for(%{ __struct__: "foo" }) == WithAny.Map
+# assert WithAny.impl_for(%{__struct__: "foo"}) == WithAny.Map
# assert WithAny.impl_for(%{}) == WithAny.Map
# assert WithAny.impl_for(self) == WithAny.Any
# end
#
# test "consolidation keeps docs" do
# docs = Sample.__info__(:docs)
-# assert { { :ok, 1 }, _, :def, [{ :thing, _, nil }], "Ok" } =
-# List.keyfind(docs, { :ok, 1 }, 0)
+# assert {{:ok, 1}, _, :def, [{:thing, _, nil}], "Ok"} =
+# List.keyfind(docs, {:ok, 1}, 0)
# end
#
# test "consolidated keeps callbacks" do
-# callbacks = for { :callback, info } <- Sample.__info__(:attributes), do: hd(info)
+# callbacks = for {:callback, info} <- Sample.__info__(:attributes), do: hd(info)
# assert callbacks != []
# end
#
# test "consolidation errors on missing beams" do
# defprotocol NoBeam, do: nil
-# assert Protocol.Consolidation.apply_to(String, []) == { :error, :not_a_protocol }
-# assert Protocol.Consolidation.apply_to(NoBeam, []) == { :error, :no_beam_info }
+# assert Protocol.Consolidation.apply_to(String, []) == {:error, :not_a_protocol}
+# assert Protocol.Consolidation.apply_to(NoBeam, []) == {:error, :no_beam_info}
# end
#
# test "consolidation updates attributes" do
diff --git a/lib/elixir/test/elixir/record_test.exs b/lib/elixir/test/elixir/record_test.exs
index 96eadf4f6..40e1e8474 100644
--- a/lib/elixir/test/elixir/record_test.exs
+++ b/lib/elixir/test/elixir/record_test.exs
@@ -24,7 +24,7 @@ defmodule RecordTest do
defstruct Record.extract(:file_info, from_lib: "kernel/include/file.hrl")
end
- assert %{ __struct__: StructExtract, size: :undefined } =
+ assert %{__struct__: StructExtract, size: :undefined} =
StructExtract.__struct__
end
@@ -34,8 +34,8 @@ defmodule RecordTest do
end
test "record?/2" do
- assert record?({ User, "jose", 27 }, User)
- refute record?({ User, "jose", 27 }, Author)
+ assert record?({User, "jose", 27}, User)
+ refute record?({User, "jose", 27}, Author)
refute record?(13, Author)
end
@@ -45,8 +45,8 @@ defmodule RecordTest do
end
test "record?/1" do
- assert record?({ User, "jose", 27 })
- refute record?({ "jose", 27 })
+ assert record?({User, "jose", 27})
+ refute record?({"jose", 27})
refute record?(13)
end
diff --git a/lib/elixir/test/elixir/regex_test.exs b/lib/elixir/test/elixir/regex_test.exs
index 328843624..3ab7d66f7 100644
--- a/lib/elixir/test/elixir/regex_test.exs
+++ b/lib/elixir/test/elixir/regex_test.exs
@@ -9,7 +9,7 @@ defmodule RegexTest do
end
test :precedence do
- assert { "aa", :unknown } |> elem(0) =~ ~r/(a)\1/
+ assert {"aa", :unknown} |> elem(0) =~ ~r/(a)\1/
end
test :backreference do
@@ -25,14 +25,14 @@ defmodule RegexTest do
end
test :compile do
- { :ok, regex } = Regex.compile("foo")
+ {:ok, regex} = Regex.compile("foo")
assert Regex.regex?(regex)
- assert { :error, _ } = Regex.compile("*foo")
- assert { :error, _ } = Regex.compile("foo", "y")
+ assert {:error, _} = Regex.compile("*foo")
+ assert {:error, _} = Regex.compile("foo", "y")
end
test :compile_with_erl_opts do
- { :ok, regex } = Regex.compile("foo\\sbar", [:dotall, {:newline, :anycrlf}])
+ {:ok, regex} = Regex.compile("foo\\sbar", [:dotall, {:newline, :anycrlf}])
assert "foo\nbar" =~ regex
end
@@ -75,10 +75,10 @@ defmodule RegexTest do
end
test :named_captures do
- assert Regex.named_captures(~r/(?<foo>c)(?<bar>d)/, "abcd") == %{ "bar" => "d", "foo" => "c" }
- assert Regex.named_captures(~r/c(?<foo>d)/, "abcd") == %{ "foo" => "d" }
+ assert Regex.named_captures(~r/(?<foo>c)(?<bar>d)/, "abcd") == %{"bar" => "d", "foo" => "c"}
+ assert Regex.named_captures(~r/c(?<foo>d)/, "abcd") == %{"foo" => "d"}
assert Regex.named_captures(~r/c(?<foo>d)/, "no_match") == nil
- assert Regex.named_captures(~r/c(?<foo>d|e)/, "abcd abce") == %{ "foo" => "d" }
+ assert Regex.named_captures(~r/c(?<foo>d|e)/, "abcd abce") == %{"foo" => "d"}
assert Regex.named_captures(~r/c(.)/, "cat") == %{}
end
diff --git a/lib/elixir/test/elixir/set_test.exs b/lib/elixir/test/elixir/set_test.exs
index 452d660d6..d37d8cdcf 100644
--- a/lib/elixir/test/elixir/set_test.exs
+++ b/lib/elixir/test/elixir/set_test.exs
@@ -3,18 +3,18 @@ Code.require_file "test_helper.exs", __DIR__
# A TestSet implementation used only for testing.
defmodule TestSet do
def new(list \\ []) when is_list(list) do
- { TestSet, list }
+ {TestSet, list}
end
- def reduce({ TestSet, list }, acc, fun) do
+ def reduce({TestSet, list}, acc, fun) do
Enumerable.reduce(list, acc, fun)
end
- def member?({ TestSet, list }, v) do
+ def member?({TestSet, list}, v) do
v in list
end
- def size({ TestSet, list }) do
+ def size({TestSet, list}) do
length(list)
end
end
diff --git a/lib/elixir/test/elixir/stream_test.exs b/lib/elixir/test/elixir/stream_test.exs
index bbc753c7f..4f2b25c87 100644
--- a/lib/elixir/test/elixir/stream_test.exs
+++ b/lib/elixir/test/elixir/stream_test.exs
@@ -146,20 +146,20 @@ defmodule StreamTest do
par = self
pid = spawn_link fn ->
Enum.each Stream.drop(&inbox_stream/2, -3),
- fn x -> send par, { :stream, x } end
+ fn x -> send par, {:stream, x} end
end
- send pid, { :stream, 1 }
- send pid, { :stream, 2 }
- send pid, { :stream, 3 }
- refute_receive { :stream, 1 }
+ send pid, {:stream, 1}
+ send pid, {:stream, 2}
+ send pid, {:stream, 3}
+ refute_receive {:stream, 1}
- send pid, { :stream, 4 }
- assert_receive { :stream, 1 }
+ send pid, {:stream, 4}
+ assert_receive {:stream, 1}
- send pid, { :stream, 5 }
- assert_receive { :stream, 2 }
- refute_receive { :stream, 3 }
+ send pid, {:stream, 5}
+ assert_receive {:stream, 2}
+ refute_receive {:stream, 3}
end
test "drop_while/2" do
@@ -246,7 +246,7 @@ defmodule StreamTest do
stream = Stream.flat_map [1,2,3],
fn i ->
Stream.resource(fn -> i end,
- fn acc -> { acc, acc + 1 } end,
+ fn acc -> {acc, acc + 1} end,
fn _ -> Process.put(:stream_flat_map, true) end)
end
@@ -257,7 +257,7 @@ defmodule StreamTest do
test "flat_map/2 does not leave outer stream suspended" do
stream = Stream.resource(fn -> 1 end,
- fn acc -> { acc, acc + 1 } end,
+ fn acc -> {acc, acc + 1} end,
fn _ -> Process.put(:stream_flat_map, true) end)
stream = Stream.flat_map(stream, fn i -> [i, i + 1, i + 2] end)
@@ -268,7 +268,7 @@ defmodule StreamTest do
test "flat_map/2 closes on error" do
stream = Stream.resource(fn -> 1 end,
- fn acc -> { acc, acc + 1 } end,
+ fn acc -> {acc, acc + 1} end,
fn _ -> Process.put(:stream_flat_map, true) end)
stream = Stream.flat_map(stream, fn _ -> throw(:error) end)
@@ -324,19 +324,19 @@ defmodule StreamTest do
end
test "transform/3" do
- stream = Stream.transform([1, 2, 3], 0, &{ [&1, &2], &1 + &2 })
+ stream = Stream.transform([1, 2, 3], 0, &{[&1, &2], &1 + &2})
assert is_lazy(stream)
assert Enum.to_list(stream) == [1, 0, 2, 1, 3, 3]
nats = Stream.iterate(1, &(&1 + 1))
- assert Stream.transform(nats, 0, &{ [&1, &2], &1 + &2 }) |> Enum.take(6) == [1, 0, 2, 1, 3, 3]
+ assert Stream.transform(nats, 0, &{[&1, &2], &1 + &2}) |> Enum.take(6) == [1, 0, 2, 1, 3, 3]
end
test "transform/3 with halt" do
stream = Stream.resource(fn -> 1 end,
- fn acc -> { acc, acc + 1 } end,
+ fn acc -> {acc, acc + 1} end,
fn _ -> Process.put(:stream_transform, true) end)
- stream = Stream.transform(stream, 0, fn i, acc -> if acc < 3, do: { [i], acc + 1 }, else: { :halt, acc } end)
+ stream = Stream.transform(stream, 0, fn i, acc -> if acc < 3, do: {[i], acc + 1}, else: {:halt, acc} end)
Process.put(:stream_transform, false)
assert Enum.to_list(stream) == [1,2,3]
@@ -383,7 +383,7 @@ defmodule StreamTest do
test "resource/3 closes on errors" do
stream = Stream.resource(fn -> 1 end,
- fn acc -> { acc, acc + 1 } end,
+ fn acc -> {acc, acc + 1} end,
fn _ -> Process.put(:stream_resource, true) end)
Process.put(:stream_resource, false)
@@ -395,7 +395,7 @@ defmodule StreamTest do
test "resource/3 is zippable" do
stream = Stream.resource(fn -> 1 end,
fn 10 -> nil
- acc -> { acc, acc + 1 }
+ acc -> {acc, acc + 1}
end,
fn _ -> Process.put(:stream_resource, true) end)
@@ -434,7 +434,7 @@ defmodule StreamTest do
assert Stream.take(stream, 5) |> Enum.to_list == [6,7,8,9,10]
stream = 1..5 |> Stream.take(10) |> Stream.drop(15)
- assert { [], [] } = Enum.split(stream, 5)
+ assert {[], []} = Enum.split(stream, 5)
stream = 1..20 |> Stream.take(10 + 5) |> Stream.drop(4)
assert Enum.to_list(stream) == [5,6,7,8,9,10,11,12,13,14,15]
@@ -522,7 +522,7 @@ defmodule StreamTest do
test "zip/2 does not leave streams suspended" do
stream = Stream.resource(fn -> 1 end,
- fn acc -> { acc, acc + 1 } end,
+ fn acc -> {acc, acc + 1} end,
fn _ -> Process.put(:stream_zip, true) end)
Process.put(:stream_zip, false)
@@ -530,13 +530,13 @@ defmodule StreamTest do
assert Process.get(:stream_zip)
Process.put(:stream_zip, false)
- assert Stream.zip(stream, [:a, :b, :c]) |> Enum.to_list == [{ 1, :a }, { 2, :b }, { 3, :c }]
+ assert Stream.zip(stream, [:a, :b, :c]) |> Enum.to_list == [{1, :a}, {2, :b}, {3, :c}]
assert Process.get(:stream_zip)
end
test "zip/2 does not leave streams suspended on halt" do
stream = Stream.resource(fn -> 1 end,
- fn acc -> { acc, acc + 1 } end,
+ fn acc -> {acc, acc + 1} end,
fn _ -> Process.put(:stream_zip, :done) end)
assert Stream.zip([:a, :b, :c, :d, :e], stream) |> Enum.take(3) ==
@@ -579,23 +579,23 @@ defmodule StreamTest do
defp collectable_pdict do
fn
- _, { :cont, x } -> Process.put(:stream_cont, [x|Process.get(:stream_cont)])
+ _, {:cont, x} -> Process.put(:stream_cont, [x|Process.get(:stream_cont)])
_, :done -> Process.put(:stream_done, true)
_, :halt -> Process.put(:stream_halt, true)
end
end
- defp inbox_stream({ :suspend, acc }, f) do
- { :suspended, acc, &inbox_stream(&1, f) }
+ defp inbox_stream({:suspend, acc}, f) do
+ {:suspended, acc, &inbox_stream(&1, f)}
end
- defp inbox_stream({ :halt, acc }, _f) do
- { :halted, acc }
+ defp inbox_stream({:halt, acc}, _f) do
+ {:halted, acc}
end
- defp inbox_stream({ :cont, acc }, f) do
+ defp inbox_stream({:cont, acc}, f) do
receive do
- { :stream, item } ->
+ {:stream, item} ->
inbox_stream(f.(item, acc), f)
end
end
diff --git a/lib/elixir/test/elixir/string_io_test.exs b/lib/elixir/test/elixir/string_io_test.exs
index 170166c9d..da589631d 100644
--- a/lib/elixir/test/elixir/string_io_test.exs
+++ b/lib/elixir/test/elixir/string_io_test.exs
@@ -4,19 +4,19 @@ defmodule StringIOTest do
use ExUnit.Case, async: true
test "start and stop" do
- { :ok, pid } = StringIO.open("")
- assert StringIO.close(pid) == { :ok, { "", "" } }
+ {:ok, pid} = StringIO.open("")
+ assert StringIO.close(pid) == {:ok, {"", ""}}
end
test "start_link and stop" do
- { :ok, pid } = StringIO.open("")
- assert StringIO.close(pid) == { :ok, { "", "" } }
+ {:ok, pid} = StringIO.open("")
+ assert StringIO.close(pid) == {:ok, {"", ""}}
end
test "peek" do
- { :ok, pid } = StringIO.open("abc")
+ {:ok, pid} = StringIO.open("abc")
IO.write(pid, "edf")
- assert StringIO.contents(pid) == { "abc", "edf" }
+ assert StringIO.contents(pid) == {"abc", "edf"}
end
## IO module
@@ -33,27 +33,27 @@ defmodule StringIOTest do
pid = start("abc\n")
assert IO.read(pid, :line) == "abc\n"
assert IO.read(pid, :line) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.read :line with \\rn" do
pid = start("abc\r\n")
assert IO.read(pid, :line) == "abc\n"
assert IO.read(pid, :line) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.read :line without line break" do
pid = start("abc")
assert IO.read(pid, :line) == "abc"
assert IO.read(pid, :line) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.read :line with invalid utf8" do
pid = start(<< 130, 227, 129, 132, 227, 129, 134 >>)
- assert IO.read(pid, :line) == { :error, :collect_line }
- assert contents(pid) == { << 130, 227, 129, 132, 227, 129, 134 >>, "" }
+ assert IO.read(pid, :line) == {:error, :collect_line}
+ assert contents(pid) == {<< 130, 227, 129, 132, 227, 129, 134 >>, ""}
end
test "IO.read count" do
@@ -61,7 +61,7 @@ defmodule StringIOTest do
assert IO.read(pid, 2) == "ab"
assert IO.read(pid, 8) == "c"
assert IO.read(pid, 1) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.read count with utf8" do
@@ -69,34 +69,34 @@ defmodule StringIOTest do
assert IO.read(pid, 2) == "あい"
assert IO.read(pid, 8) == "う"
assert IO.read(pid, 1) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.read count with invalid utf8" do
pid = start(<< 130, 227, 129, 132, 227, 129, 134 >>)
- assert IO.read(pid, 2) == { :error, :invalid_unicode }
- assert contents(pid) == { << 130, 227, 129, 132, 227, 129, 134 >>, "" }
+ assert IO.read(pid, 2) == {:error, :invalid_unicode}
+ assert contents(pid) == {<< 130, 227, 129, 132, 227, 129, 134 >>, ""}
end
test "IO.binread :line with \\n" do
pid = start("abc\n")
assert IO.binread(pid, :line) == "abc\n"
assert IO.binread(pid, :line) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.binread :line with \\r\\n" do
pid = start("abc\r\n")
assert IO.binread(pid, :line) == "abc\n"
assert IO.binread(pid, :line) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.binread :line without line break" do
pid = start("abc")
assert IO.binread(pid, :line) == "abc"
assert IO.binread(pid, :line) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.binread count" do
@@ -104,7 +104,7 @@ defmodule StringIOTest do
assert IO.binread(pid, 2) == "ab"
assert IO.binread(pid, 8) == "c"
assert IO.binread(pid, 1) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.binread count with utf8" do
@@ -112,109 +112,109 @@ defmodule StringIOTest do
assert IO.binread(pid, 2) == << 227, 129 >>
assert IO.binread(pid, 8) == << 130, 227, 129, 132, 227, 129, 134 >>
assert IO.binread(pid, 1) == :eof
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.write" do
pid = start("")
assert IO.write(pid, "foo") == :ok
- assert contents(pid) == { "", "foo" }
+ assert contents(pid) == {"", "foo"}
end
test "IO.write with utf8" do
pid = start("")
assert IO.write(pid, "あいう") == :ok
- assert contents(pid) == { "", "あいう" }
+ assert contents(pid) == {"", "あいう"}
end
test "IO.binwrite" do
pid = start("")
assert IO.binwrite(pid, "foo") == :ok
- assert contents(pid) == { "", "foo" }
+ assert contents(pid) == {"", "foo"}
end
test "IO.binwrite with utf8" do
pid = start("")
assert IO.binwrite(pid, "あいう") == :ok
- assert contents(pid) == { "", "あいう" }
+ assert contents(pid) == {"", "あいう"}
end
test "IO.puts" do
pid = start("")
assert IO.puts(pid, "abc") == :ok
- assert contents(pid) == { "", "abc\n" }
+ assert contents(pid) == {"", "abc\n"}
end
test "IO.inspect" do
pid = start("")
assert IO.inspect(pid, {}, []) == {}
- assert contents(pid) == { "", "{}\n" }
+ assert contents(pid) == {"", "{}\n"}
end
test "IO.getn" do
pid = start("abc")
assert IO.getn(pid, ">", 2) == "ab"
- assert contents(pid) == { "c", "" }
+ assert contents(pid) == {"c", ""}
end
test "IO.getn with utf8" do
pid = start("あいう")
assert IO.getn(pid, ">", 2) == "あい"
- assert contents(pid) == { "う", "" }
+ assert contents(pid) == {"う", ""}
end
test "IO.getn with invalid utf8" do
pid = start(<< 130, 227, 129, 132, 227, 129, 134 >>)
- assert IO.getn(pid, ">", 2) == { :error, :invalid_unicode }
- assert contents(pid) == { << 130, 227, 129, 132, 227, 129, 134 >>, "" }
+ assert IO.getn(pid, ">", 2) == {:error, :invalid_unicode}
+ assert contents(pid) == {<< 130, 227, 129, 132, 227, 129, 134 >>, ""}
end
test "IO.getn with capture_prompt" do
pid = start("abc", capture_prompt: true)
assert IO.getn(pid, ">", 2) == "ab"
- assert contents(pid) == { "c", ">" }
+ assert contents(pid) == {"c", ">"}
end
test "IO.gets with \\n" do
pid = start("abc\nd")
assert IO.gets(pid, ">") == "abc\n"
- assert contents(pid) == { "d", "" }
+ assert contents(pid) == {"d", ""}
end
test "IO.gets with \\r\\n" do
pid = start("abc\r\nd")
assert IO.gets(pid, ">") == "abc\n"
- assert contents(pid) == { "d", "" }
+ assert contents(pid) == {"d", ""}
end
test "IO.gets without line breaks" do
pid = start("abc")
assert IO.gets(pid, ">") == "abc"
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.gets with invalid utf8" do
pid = start(<< 130, 227, 129, 132, 227, 129, 134 >>)
- assert IO.gets(pid, ">") == { :error, :collect_line }
- assert contents(pid) == { << 130, 227, 129, 132, 227, 129, 134 >>, "" }
+ assert IO.gets(pid, ">") == {:error, :collect_line}
+ assert contents(pid) == {<< 130, 227, 129, 132, 227, 129, 134 >>, ""}
end
test "IO.gets with capture_prompt" do
pid = start("abc\n", capture_prompt: true)
assert IO.gets(pid, ">") == "abc\n"
- assert contents(pid) == { "", ">" }
+ assert contents(pid) == {"", ">"}
end
test ":io.get_password" do
pid = start("abc\n")
assert :io.get_password(pid) == "abc\n"
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.stream" do
pid = start("abc")
assert IO.stream(pid, 2) |> Enum.to_list == ["ab", "c"]
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
test "IO.stream with invalid utf8" do
@@ -222,12 +222,12 @@ defmodule StringIOTest do
assert_raise IO.StreamError, fn->
IO.stream(pid, 2) |> Enum.to_list
end
- assert contents(pid) == { << 130, 227, 129, 132, 227, 129, 134 >>, "" }
+ assert contents(pid) == {<< 130, 227, 129, 132, 227, 129, 134 >>, ""}
end
test "IO.binstream" do
pid = start("abc")
assert IO.stream(pid, 2) |> Enum.to_list == ["ab", "c"]
- assert contents(pid) == { "", "" }
+ assert contents(pid) == {"", ""}
end
end
diff --git a/lib/elixir/test/elixir/string_test.exs b/lib/elixir/test/elixir/string_test.exs
index c6ebfd501..9532d8b40 100644
--- a/lib/elixir/test/elixir/string_test.exs
+++ b/lib/elixir/test/elixir/string_test.exs
@@ -10,8 +10,8 @@ defmodule StringTest do
end
test :next_codepoint do
- assert String.next_codepoint("ésoj") == { "é", "soj" }
- assert String.next_codepoint(<<255>>) == { <<255>>, "" }
+ assert String.next_codepoint("ésoj") == {"é", "soj"}
+ assert String.next_codepoint(<<255>>) == {<<255>>, ""}
assert String.next_codepoint("") == nil
end
@@ -430,10 +430,10 @@ defmodule StringTest do
end
test :from_char_list do
- assert String.from_char_data([?æ, ?ß]) == { :ok, "æß" }
- assert String.from_char_data([?a, ?b, ?c]) == { :ok, "abc" }
+ assert String.from_char_data([?æ, ?ß]) == {:ok, "æß"}
+ assert String.from_char_data([?a, ?b, ?c]) == {:ok, "abc"}
- assert String.from_char_data([0xDFFF]) == { :error, "", [0xDFFF] }
+ assert String.from_char_data([0xDFFF]) == {:error, "", [0xDFFF]}
end
test :from_char_list! do
diff --git a/lib/elixir/test/elixir/supervisor/behaviour_test.exs b/lib/elixir/test/elixir/supervisor/behaviour_test.exs
index 6f2d33aaf..6acf3aca0 100644
--- a/lib/elixir/test/elixir/supervisor/behaviour_test.exs
+++ b/lib/elixir/test/elixir/supervisor/behaviour_test.exs
@@ -27,65 +27,65 @@ defmodule Supervisor.BehaviourTest do
test :start_link do
assert :ignore = :supervisor.start_link(Sup, :noop)
- assert { :ok, pid } = :supervisor.start_link(Sup, :ok)
+ assert {:ok, pid} = :supervisor.start_link(Sup, :ok)
assert is_pid(pid)
end
test :worker do
assert worker(Foo, [1, 2, 3]) == {
Foo,
- { Foo, :start_link, [1, 2, 3] },
+ {Foo, :start_link, [1, 2, 3]},
:permanent,
5000,
:worker,
[Foo]
- }
+ }
opts = [id: :sample, function: :start, modules: :dynamic,
restart: :temporary, shutdown: :brutal_kill]
assert worker(Foo, [1, 2, 3], opts) == {
:sample,
- { Foo, :start, [1, 2, 3] },
+ {Foo, :start, [1, 2, 3]},
:temporary,
:brutal_kill,
:worker,
:dynamic
- }
+ }
end
test :supervisor do
assert supervisor(Foo, [1, 2, 3]) == {
Foo,
- { Foo, :start_link, [1, 2, 3] },
+ {Foo, :start_link, [1, 2, 3]},
:permanent,
:infinity,
:supervisor,
[Foo]
- }
+ }
opts = [id: :sample, function: :start, modules: :dynamic,
restart: :temporary, shutdown: :brutal_kill]
assert supervisor(Foo, [1, 2, 3], opts) == {
:sample,
- { Foo, :start, [1, 2, 3] },
+ {Foo, :start, [1, 2, 3]},
:temporary,
:brutal_kill,
:supervisor,
:dynamic
- }
+ }
end
test :supervise do
assert supervise([], strategy: :one_for_one) == {
- :ok, { { :one_for_one, 5, 5 }, [] }
- }
+ :ok, {{:one_for_one, 5, 5}, []}
+ }
opts = [strategy: :one_for_all, max_restarts: 1, max_seconds: 1]
assert supervise([:sample], opts) == {
- :ok, { { :one_for_all, 1, 1 }, [:sample] }
- }
+ :ok, {{:one_for_all, 1, 1}, [:sample]}
+ }
end
end
diff --git a/lib/elixir/test/elixir/system_test.exs b/lib/elixir/test/elixir/system_test.exs
index bef38e401..c13b0d9b6 100644
--- a/lib/elixir/test/elixir/system_test.exs
+++ b/lib/elixir/test/elixir/system_test.exs
@@ -39,7 +39,7 @@ defmodule SystemTest do
test "argv" do
list = elixir('-e "IO.inspect System.argv" -- -o opt arg1 arg2 --long-opt 10')
- { args, _ } = Code.eval_string list, []
+ {args, _} = Code.eval_string list, []
assert args == ["-o", "opt", "arg1", "arg2", "--long-opt", "10"]
end
@@ -54,7 +54,7 @@ defmodule SystemTest do
System.delete_env(@test_var)
assert System.get_env(@test_var) == nil
- System.put_env(%{ @test_var => "OTHER_SAMPLE" })
+ System.put_env(%{@test_var => "OTHER_SAMPLE"})
assert System.get_env(@test_var) == "OTHER_SAMPLE"
end
diff --git a/lib/elixir/test/elixir/test_helper.exs b/lib/elixir/test/elixir/test_helper.exs
index 852760f38..3850ad2bb 100644
--- a/lib/elixir/test/elixir/test_helper.exs
+++ b/lib/elixir/test/elixir/test_helper.exs
@@ -43,7 +43,7 @@ defmodule PathHelpers do
Path.expand("../../../../bin/#{name}#{executable_extension}", __DIR__)
end
- if match? { :win32, _ }, :os.type do
+ if match? {:win32, _}, :os.type do
def is_win?, do: true
def executable_extension, do: ".bat"
def redirect_std_err_on_win, do: " 2>&1"
@@ -59,7 +59,7 @@ defmodule CompileAssertion do
def assert_compile_fail(exception, string) do
case format_rescue(string) do
- { ^exception, _ } -> :ok
+ {^exception, _} -> :ok
error ->
raise ExUnit.AssertionError,
left: inspect(elem(error, 0)),
@@ -70,7 +70,7 @@ defmodule CompileAssertion do
def assert_compile_fail(exception, message, string) do
case format_rescue(string) do
- { ^exception, ^message } -> :ok
+ {^exception, ^message} -> :ok
error ->
raise ExUnit.AssertionError,
left: "#{inspect elem(error, 0)}[message: #{inspect elem(error, 1)}]",
@@ -84,7 +84,7 @@ defmodule CompileAssertion do
:elixir.eval(to_char_list(expr), [])
nil
rescue
- error -> { error.__record__(:name), error.message }
+ error -> {error.__record__(:name), error.message}
end
result || flunk(message: "Expected expression to fail")
diff --git a/lib/elixir/test/elixir/tuple_test.exs b/lib/elixir/test/elixir/tuple_test.exs
index 63e14edc1..a38f8f514 100644
--- a/lib/elixir/test/elixir/tuple_test.exs
+++ b/lib/elixir/test/elixir/tuple_test.exs
@@ -4,41 +4,41 @@ defmodule TupleTest do
use ExUnit.Case, async: true
test :elem do
- assert elem({ :a, :b, :c }, 1) == :b
+ assert elem({:a, :b, :c}, 1) == :b
end
test :set_elem do
- assert set_elem({ :a, :b, :c }, 1, :d) == { :a, :d, :c }
+ assert set_elem({:a, :b, :c}, 1, :d) == {:a, :d, :c}
end
test :keywords do
- assert { 1, 2, three: :four } == { 1, 2, [three: :four] }
- assert { one: :two } == { [one: :two] }
+ assert {1, 2, three: :four} == {1, 2, [three: :four]}
+ assert {one: :two} == {[one: :two]}
end
test :optional_comma do
- assert { 1 } == { 1, }
- assert { 1, 2, 3 } == { 1, 2, 3, }
+ assert {1} == {1,}
+ assert {1, 2, 3} == {1, 2, 3,}
end
test :partial_application do
- assert (&{ &1, 2 }).(1) == { 1, 2 }
- assert (&{ &1, &2 }).(1, 2) == { 1, 2 }
- assert (&{ &2, &1 }).(2, 1) == { 1, 2 }
+ assert (&{&1, 2}).(1) == {1, 2}
+ assert (&{&1, &2}).(1, 2) == {1, 2}
+ assert (&{&2, &1}).(2, 1) == {1, 2}
end
# Tuple module
test :duplicate do
assert Tuple.duplicate(:foo, 0) == {}
- assert Tuple.duplicate(:foo, 3) == { :foo, :foo, :foo }
+ assert Tuple.duplicate(:foo, 3) == {:foo, :foo, :foo}
end
test :insert_at do
- assert Tuple.insert_at({ :bar, :baz }, 0, :foo) == { :foo, :bar, :baz }
+ assert Tuple.insert_at({:bar, :baz}, 0, :foo) == {:foo, :bar, :baz}
end
test :delete_at do
- assert Tuple.delete_at({ :foo, :bar, :baz }, 0) == { :bar, :baz }
+ assert Tuple.delete_at({:foo, :bar, :baz}, 0) == {:bar, :baz}
end
end \ No newline at end of file
diff --git a/lib/elixir/test/elixir/version_test.exs b/lib/elixir/test/elixir/version_test.exs
index 9b638aac3..e5e956155 100644
--- a/lib/elixir/test/elixir/version_test.exs
+++ b/lib/elixir/test/elixir/version_test.exs
@@ -55,11 +55,11 @@ defmodule VersionTest do
end
test "parse" do
- assert { :ok, V.Schema[major: 1, minor: 2, patch: 3] } = V.parse("1.2.3")
- assert { :ok, V.Schema[major: 1, minor: 4, patch: 5] } = V.parse("1.4.5+ignore")
- assert { :ok, V.Schema[major: 1, minor: 4, patch: 5, pre: ["6-g3318bd5"]] } = V.parse("1.4.5-6-g3318bd5")
- assert { :ok, V.Schema[major: 1, minor: 4, patch: 5, pre: [6, 7, "eight"]] } = V.parse("1.4.5-6.7.eight")
- assert { :ok, V.Schema[major: 1, minor: 4, patch: 5, pre: ["6-g3318bd5"]] } = V.parse("1.4.5-6-g3318bd5+ignore")
+ assert {:ok, V.Schema[major: 1, minor: 2, patch: 3]} = V.parse("1.2.3")
+ assert {:ok, V.Schema[major: 1, minor: 4, patch: 5]} = V.parse("1.4.5+ignore")
+ assert {:ok, V.Schema[major: 1, minor: 4, patch: 5, pre: ["6-g3318bd5"]]} = V.parse("1.4.5-6-g3318bd5")
+ assert {:ok, V.Schema[major: 1, minor: 4, patch: 5, pre: [6, 7, "eight"]]} = V.parse("1.4.5-6.7.eight")
+ assert {:ok, V.Schema[major: 1, minor: 4, patch: 5, pre: ["6-g3318bd5"]]} = V.parse("1.4.5-6-g3318bd5+ignore")
assert :error = V.parse("foobar")
assert :error = V.parse("2.3")
diff --git a/lib/elixir/test/erlang/atom_test.erl b/lib/elixir/test/erlang/atom_test.erl
index 809026077..3b84b90fb 100644
--- a/lib/elixir/test/erlang/atom_test.erl
+++ b/lib/elixir/test/erlang/atom_test.erl
@@ -4,8 +4,8 @@
-include_lib("eunit/include/eunit.hrl").
eval(Content) ->
- { Value, Binding, _, _ } = elixir:eval(Content, []),
- { Value, Binding }.
+ {Value, Binding, _, _} = elixir:eval(Content, []),
+ {Value, Binding}.
kv([{Key,nil}]) -> Key.
diff --git a/lib/elixir/test/erlang/control_test.erl b/lib/elixir/test/erlang/control_test.erl
index d56cf38e0..99a17a80f 100644
--- a/lib/elixir/test/erlang/control_test.erl
+++ b/lib/elixir/test/erlang/control_test.erl
@@ -3,12 +3,12 @@
-include_lib("eunit/include/eunit.hrl").
eval(Content) ->
- { Value, Binding, _, _ } = elixir:eval(Content, []),
- { Value, Binding }.
+ {Value, Binding, _, _} = elixir:eval(Content, []),
+ {Value, Binding}.
to_erl(String) ->
Forms = elixir:'string_to_quoted!'(String, 1, <<"nofile">>, []),
- { Expr, _, _ } = elixir:quoted_to_erl(Forms, elixir:env_for_eval([])),
+ {Expr, _, _} = elixir:quoted_to_erl(Forms, elixir:env_for_eval([])),
Expr.
% Booleans
@@ -272,28 +272,28 @@ oror_test() ->
% Optimized
optimized_if_test() ->
- { 'case', _, _,
+ {'case', _, _,
[{clause,_,[{atom,_,false}],[],[{atom,_,else}]},
{clause,_,[{atom,_,true}],[],[{atom,_,do}]}]
- } = to_erl("if is_list([]), do: :do, else: :else").
+ } = to_erl("if is_list([]), do: :do, else: :else").
optimized_andand_test() ->
- { 'case', _, _,
+ {'case', _, _,
[{clause,_,
[{var,_,Var}],
[[{op,_,'orelse',_,_}]],
[{var,_,Var}]},
{clause,_,[{var,_,'_'}],[],[{atom,0,done}]}]
- } = to_erl("is_list([]) && :done").
+ } = to_erl("is_list([]) && :done").
optimized_oror_test() ->
- { 'case', _, _,
+ {'case', _, _,
[{clause,1,
[{var,1,_}],
[[{op,1,'orelse',_,_}]],
[{atom,0,done}]},
{clause,1,[{var,1,Var}],[],[{var,1,Var}]}]
- } = to_erl("is_list([]) || :done").
+ } = to_erl("is_list([]) || :done").
no_after_in_try_test() ->
- { 'try', _, [_], [_], _, [] } = to_erl("try do :foo.bar() else _ -> :ok end"). \ No newline at end of file
+ {'try', _, [_], [_], _, []} = to_erl("try do :foo.bar() else _ -> :ok end"). \ No newline at end of file
diff --git a/lib/elixir/test/erlang/function_test.erl b/lib/elixir/test/erlang/function_test.erl
index 1e42b132f..8f875a581 100644
--- a/lib/elixir/test/erlang/function_test.erl
+++ b/lib/elixir/test/erlang/function_test.erl
@@ -2,8 +2,8 @@
-include_lib("eunit/include/eunit.hrl").
eval(Content) ->
- { Value, Binding, _, _ } = elixir:eval(Content, []),
- { Value, Binding }.
+ {Value, Binding, _, _} = elixir:eval(Content, []),
+ {Value, Binding}.
function_arg_do_end_test() ->
{3, _} = eval("if true do\n1 + 2\nend"),
@@ -18,14 +18,14 @@ function_stab_end_test() ->
3 = Fun3().
function_stab_many_test() ->
- {_, [{a, Fun}]} = eval("a = fn\n{ :foo, x } -> x\n{ :bar, x } -> x\nend"),
- 1 = Fun({ foo, 1 }),
- 2 = Fun({ bar, 2 }).
+ {_, [{a, Fun}]} = eval("a = fn\n{:foo, x} -> x\n{:bar, x} -> x\nend"),
+ 1 = Fun({foo, 1}),
+ 2 = Fun({bar, 2}).
function_stab_inline_test() ->
- {_, [{a, Fun}]} = eval("a = fn { :foo, x } -> x; { :bar, x } -> x end"),
- 1 = Fun({ foo, 1 }),
- 2 = Fun({ bar, 2 }).
+ {_, [{a, Fun}]} = eval("a = fn {:foo, x} -> x; {:bar, x} -> x end"),
+ 1 = Fun({foo, 1}),
+ 2 = Fun({bar, 2}).
function_with_args_test() ->
{Fun, _} = eval("fn(a, b) -> a + b end"),
@@ -119,9 +119,9 @@ function_call_with_parens_args_and_nested_when_test() ->
%% Partial application
require_partial_application_test() ->
- { Fun, _ } = eval("&List.flatten(&1)"),
+ {Fun, _} = eval("&List.flatten(&1)"),
Fun = fun 'Elixir.List':flatten/1.
import_partial_application_test() ->
- { Fun, _ } = eval("&is_atom(&1)"),
+ {Fun, _} = eval("&is_atom(&1)"),
Fun = fun erlang:is_atom/1.
diff --git a/lib/elixir/test/erlang/match_test.erl b/lib/elixir/test/erlang/match_test.erl
index 75d348e62..317b066da 100644
--- a/lib/elixir/test/erlang/match_test.erl
+++ b/lib/elixir/test/erlang/match_test.erl
@@ -4,8 +4,8 @@
eval(Content) -> eval(Content, []).
eval(Content, Initial) ->
- { Value, Binding, _, _ } = elixir:eval(Content, Initial),
- { Value, Binding }.
+ {Value, Binding, _, _} = elixir:eval(Content, Initial),
+ {Value, Binding}.
no_assignment_test() ->
{nil, []} = eval("").
@@ -30,9 +30,9 @@ not_single_assignment_test() ->
duplicated_assignment_on_module_with_tuple_test() ->
F = fun() ->
- eval("defmodule Foo do\ndef v({ a, _left }, { a, _right }), do: a\nend"),
- {1,_} = eval("Foo.v({ 1, :foo }, { 1, :bar })"),
- ?assertError(function_clause, eval("Foo.v({ 1, :foo }, { 2, :bar })"))
+ eval("defmodule Foo do\ndef v({a, _left}, {a, _right}), do: a\nend"),
+ {1,_} = eval("Foo.v({1, :foo}, {1, :bar})"),
+ ?assertError(function_clause, eval("Foo.v({1, :foo}, {2, :bar})"))
end,
test_helper:run_and_remove(F, ['Elixir.Foo']).
@@ -67,7 +67,7 @@ underscore_assignment_test() ->
{1, []} = eval("_ = 1").
assignment_precedence_test() ->
- {_, [{x,{ '__block__', _, [1,2,3]}}]} = eval("x = quote do\n1\n2\n3\nend").
+ {_, [{x,{'__block__', _, [1,2,3]}}]} = eval("x = quote do\n1\n2\n3\nend").
% Tuples match
simple_tuple_test() ->
diff --git a/lib/elixir/test/erlang/module_test.erl b/lib/elixir/test/erlang/module_test.erl
index c0844cf90..663b15d6b 100644
--- a/lib/elixir/test/erlang/module_test.erl
+++ b/lib/elixir/test/erlang/module_test.erl
@@ -2,8 +2,8 @@
-include_lib("eunit/include/eunit.hrl").
eval(Content) ->
- { Value, Binding, _, _ } = elixir:eval(Content, []),
- { Value, Binding }.
+ {Value, Binding, _, _} = elixir:eval(Content, []),
+ {Value, Binding}.
definition_test() ->
F = fun() ->
@@ -25,7 +25,7 @@ function_test() ->
test_helper:run_and_remove(F, ['Elixir.Foo.Bar.Baz']).
quote_unquote_splicing_test() ->
- { { '{}', [], [1,2,3,4,5] }, _ } = eval("x = [2,3,4]\nquote do: { 1, unquote_splicing(x), 5}").
+ {{'{}', [], [1,2,3,4,5]}, _} = eval("x = [2,3,4]\nquote do: {1, unquote_splicing(x), 5}").
def_shortcut_test() ->
F = fun() ->
@@ -88,17 +88,17 @@ nesting_test() ->
test_helper:run_and_remove(F, ['Elixir.Foo', 'Elixir.Bar']).
dot_alias_test() ->
- { 'Elixir.Foo.Bar.Baz', _ } = eval("Foo.Bar.Baz").
+ {'Elixir.Foo.Bar.Baz', _} = eval("Foo.Bar.Baz").
dot_dyn_alias_test() ->
- { 'Elixir.Foo.Bar.Baz', _ } = eval("a = Foo.Bar; a.Baz").
+ {'Elixir.Foo.Bar.Baz', _} = eval("a = Foo.Bar; a.Baz").
single_ref_test() ->
- { 'Elixir.Foo', _ } = eval("Foo"),
- { 'Elixir.Foo', _ } = eval("Elixir.Foo").
+ {'Elixir.Foo', _} = eval("Foo"),
+ {'Elixir.Foo', _} = eval("Elixir.Foo").
nested_ref_test() ->
- { 'Elixir.Foo.Bar.Baz', _ } = eval("Foo.Bar.Baz").
+ {'Elixir.Foo.Bar.Baz', _} = eval("Foo.Bar.Baz").
dynamic_defmodule_test() ->
F = fun() ->
diff --git a/lib/elixir/test/erlang/operators_test.erl b/lib/elixir/test/erlang/operators_test.erl
index b6a0bf275..d4ef8936c 100644
--- a/lib/elixir/test/erlang/operators_test.erl
+++ b/lib/elixir/test/erlang/operators_test.erl
@@ -2,8 +2,8 @@
-include_lib("eunit/include/eunit.hrl").
eval(Content) ->
- { Value, Binding, _, _ } = elixir:eval(Content, []),
- { Value, Binding }.
+ {Value, Binding, _, _} = elixir:eval(Content, []),
+ {Value, Binding}.
separator_test() ->
{334,[]} = eval("3_34"),
diff --git a/lib/elixir/test/erlang/string_test.erl b/lib/elixir/test/erlang/string_test.erl
index fba1e5b29..06a5b9fb3 100644
--- a/lib/elixir/test/erlang/string_test.erl
+++ b/lib/elixir/test/erlang/string_test.erl
@@ -3,8 +3,8 @@
-include_lib("eunit/include/eunit.hrl").
eval(Content) ->
- { Value, Binding, _, _ } = elixir:eval(Content, []),
- { Value, Binding }.
+ {Value, Binding, _, _} = elixir:eval(Content, []),
+ {Value, Binding}.
extract_interpolations(String) ->
element(2, elixir_interpolation:extract(1,
diff --git a/lib/elixir/test/erlang/test_helper.erl b/lib/elixir/test/erlang/test_helper.erl
index dbdc98172..70d2d0541 100644
--- a/lib/elixir/test/erlang/test_helper.erl
+++ b/lib/elixir/test/erlang/test_helper.erl
@@ -27,7 +27,7 @@ run_and_remove(Fun, Modules) ->
% Throws an error with the Erlang Abstract Form from the Elixir string
throw_elixir(String) ->
Forms = elixir:'string_to_quoted!'(String, 1, <<"nofile">>, []),
- { Expr, _, _ } = elixir:quoted_to_erl(Forms, elixir:env_for_eval([])),
+ {Expr, _, _} = elixir:quoted_to_erl(Forms, elixir:env_for_eval([])),
erlang:error(io:format("~p~n", [Expr])).
% Throws an error with the Erlang Abstract Form from the Erlang string
diff --git a/lib/elixir/test/erlang/tokenizer_test.erl b/lib/elixir/test/erlang/tokenizer_test.erl
index be01f0ad0..149389d2c 100644
--- a/lib/elixir/test/erlang/tokenizer_test.erl
+++ b/lib/elixir/test/erlang/tokenizer_test.erl
@@ -3,11 +3,11 @@
-include_lib("eunit/include/eunit.hrl").
tokenize(String) ->
- { ok, _Line, Result } = elixir_tokenizer:tokenize(String, 1, []),
+ {ok, _Line, Result} = elixir_tokenizer:tokenize(String, 1, []),
Result.
tokenize_error(String) ->
- { error, Error, _, _ } = elixir_tokenizer:tokenize(String, 1, []),
+ {error, Error, _, _} = elixir_tokenizer:tokenize(String, 1, []),
Error.
type_test() ->
@@ -49,7 +49,7 @@ quoted_atom_test() ->
oversized_atom_test() ->
OversizedAtom = [$:|string:copies("a", 256)],
- { 1, "atom length must be less than system limit", ":" } = tokenize_error(OversizedAtom).
+ {1, "atom length must be less than system limit", ":"} = tokenize_error(OversizedAtom).
op_atom_test() ->
[{atom,1,f0_1}] = tokenize(":f0_1").
diff --git a/lib/elixir/unicode/unicode.ex b/lib/elixir/unicode/unicode.ex
index a5e9f9f83..525a534d7 100644
--- a/lib/elixir/unicode/unicode.ex
+++ b/lib/elixir/unicode/unicode.ex
@@ -14,7 +14,7 @@ defmodule String.Unicode do
data_path = Path.join(__DIR__, "UnicodeData.txt")
- { codes, whitespace } = Enum.reduce File.stream!(data_path), { [], [] }, fn(line, { cacc, wacc }) ->
+ {codes, whitespace} = Enum.reduce File.stream!(data_path), {[], []}, fn(line, {cacc, wacc}) ->
[ codepoint, _name, _category,
_class, bidi, _decomposition,
_numeric_1, _numeric_2, _numeric_3,
@@ -25,11 +25,11 @@ defmodule String.Unicode do
cond do
upper != "" or lower != "" or title != "" ->
- { [{ to_binary.(codepoint), to_binary.(upper), to_binary.(lower), to_binary.(title) } | cacc], wacc }
+ {[{to_binary.(codepoint), to_binary.(upper), to_binary.(lower), to_binary.(title)} | cacc], wacc}
bidi in ["B", "S", "WS"] ->
- { cacc, [to_binary.(codepoint) | wacc] }
+ {cacc, [to_binary.(codepoint) | wacc]}
true ->
- { cacc, wacc }
+ {cacc, wacc}
end
end
@@ -38,14 +38,14 @@ defmodule String.Unicode do
codes = Enum.reduce File.stream!(special_path), codes, fn(line, acc) ->
[ codepoint, lower, title, upper, _comment ] = :binary.split(line, "; ", [:global])
key = to_binary.(codepoint)
- :lists.keystore(key, 1, acc, { key, to_binary.(upper), to_binary.(lower), to_binary.(title) })
+ :lists.keystore(key, 1, acc, {key, to_binary.(upper), to_binary.(lower), to_binary.(title)})
end
# Downcase
def downcase(string), do: do_downcase(string) |> iodata_to_binary
- for { codepoint, _upper, lower, _title } <- codes, lower && lower != codepoint do
+ for {codepoint, _upper, lower, _title} <- codes, lower && lower != codepoint do
defp do_downcase(unquote(codepoint) <> rest) do
unquote(:binary.bin_to_list(lower)) ++ downcase(rest)
end
@@ -61,7 +61,7 @@ defmodule String.Unicode do
def upcase(string), do: do_upcase(string) |> iodata_to_binary
- for { codepoint, upper, _lower, _title } <- codes, upper && upper != codepoint do
+ for {codepoint, upper, _lower, _title} <- codes, upper && upper != codepoint do
defp do_upcase(unquote(codepoint) <> rest) do
unquote(:binary.bin_to_list(upper)) ++ do_upcase(rest)
end
@@ -75,16 +75,16 @@ defmodule String.Unicode do
# Titlecase once
- def titlecase_once(""), do: { "", "" }
+ def titlecase_once(""), do: {"", ""}
- for { codepoint, _upper, _lower, title } <- codes, title && title != codepoint do
+ for {codepoint, _upper, _lower, title} <- codes, title && title != codepoint do
def titlecase_once(unquote(codepoint) <> rest) do
- { unquote(title), rest }
+ {unquote(title), rest}
end
end
def titlecase_once(<< char, rest :: binary >>) do
- { << char >>, rest }
+ {<< char >>, rest}
end
# Strip
@@ -143,7 +143,7 @@ defmodule String.Unicode do
add_buffer_to_acc(buffer, acc)
end
- @compile { :inline, add_buffer_to_acc: 2 }
+ @compile {:inline, add_buffer_to_acc: 2}
defp add_buffer_to_acc("", acc), do: acc
defp add_buffer_to_acc(buffer, acc), do: [buffer|acc]
@@ -151,11 +151,11 @@ defmodule String.Unicode do
# Codepoints
def next_codepoint(<< cp :: utf8, rest :: binary >>) do
- { <<cp :: utf8>>, rest }
+ {<<cp :: utf8>>, rest}
end
def next_codepoint(<< cp, rest :: binary >>) do
- { <<cp>>, rest }
+ {<<cp>>, rest}
end
def next_codepoint(<<>>) do
@@ -166,7 +166,7 @@ defmodule String.Unicode do
do_codepoints(next_codepoint(binary))
end
- defp do_codepoints({ c, rest }) do
+ defp do_codepoints({c, rest}) do
[c|do_codepoints(next_codepoint(rest))]
end
@@ -209,13 +209,13 @@ defmodule String.Graphemes do
# Don't break CRLF
def next_grapheme(<< ?\n, ?\r, rest :: binary >>) do
- { "\n\r", rest }
+ {"\n\r", rest}
end
# Break on control
for codepoint <- cluster["CR"] ++ cluster["LF"] ++ cluster["Control"] do
def next_grapheme(<< unquote(codepoint), rest :: binary >> = string) do
- { :binary.part(string, 0, unquote(size(codepoint))), rest }
+ {:binary.part(string, 0, unquote(size(codepoint))), rest}
end
end
@@ -253,7 +253,7 @@ defmodule String.Graphemes do
end
def next_grapheme(<< cp, rest :: binary >>) do
- { <<cp>>, rest }
+ {<<cp>>, rest}
end
def next_grapheme(<<>>) do
@@ -324,7 +324,7 @@ defmodule String.Graphemes do
end
defp next_extend(rest, string, size) do
- { :binary.part(string, 0, size), rest }
+ {:binary.part(string, 0, size), rest}
end
# Handle Prepend
@@ -335,14 +335,14 @@ defmodule String.Graphemes do
# end
#
# defp next_prepend(rest, string, size) do
- # { :binary.part(string, 0, size), rest }
+ # {:binary.part(string, 0, size), rest}
# end
def graphemes(binary) when is_binary(binary) do
do_graphemes(next_grapheme(binary))
end
- defp do_graphemes({ c, rest }) do
+ defp do_graphemes({c, rest}) do
[c|do_graphemes(next_grapheme(rest))]
end