Compare commits
6 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| | 8258d56b34 | | |
| | 5a8e0b602d | | |
| | ae81b7eb0a | | |
| | b487f98d9e | | |
| | 1dc686215e | | |
| | c8670ae1b9 | | |
@@ -3,7 +3,6 @@
{included_applications,[]},
{applications,[stdlib,kernel]},
{description,"Gajumaru interoperation library"},
{vsn,"0.7.0"},
{modules,[hakuzaru,hz,hz_fetcher,hz_grids,hz_key_master,hz_man,
          hz_sup]},
{vsn,"0.5.1"},
{modules,[hakuzaru,hz,hz_fetcher,hz_man,hz_sup]},
{mod,{hakuzaru,[]}}]}.

priv/words4096.txt (4096 changed lines): file diff suppressed because it is too large
@@ -6,7 +6,7 @@
%%% @end

-module(hakuzaru).
-vsn("0.7.0").
-vsn("0.5.1").
-author("Craig Everett <ceverett@tsuriai.jp>").
-copyright("Craig Everett <ceverett@tsuriai.jp>").
-license("GPL-3.0-or-later").

src/hz.erl (636 changed lines)
@@ -9,7 +9,7 @@
%%%
%%% The get/set admin functions are for setting or checking things like the Gajumaru
%%% "network ID" and list of addresses of nodes you want to use for answering
%%% queries to the blockchain. Get functions are arity 0, and set functions are arity 1.
%%% queries to the blockchain.
%%%
%%% The JSON query interface functions are the blockchain query functions themselves
%%% which are translated to network queries and return Erlang messages as responses.
@@ -18,12 +18,12 @@
%%% a desired call to a smart contract on the chain to call data serialized in a form
%%% that a Gajumaru compatible wallet or library can sign and submit to a Gajumaru node.
%%%
%%% NOTE:
%%% This module does not implement the OTP application behavior. Refer to hakuzaru.erl.
%%% This module does not implement the OTP application behavior.
%%% helper functions.
%%% @end

-module(hz).
-vsn("0.7.0").
-vsn("0.5.1").
-author("Craig Everett <ceverett@tsuriai.jp>").
-copyright("Craig Everett <ceverett@tsuriai.jp>").
-license("GPL-3.0-or-later").
@@ -73,8 +73,7 @@
         decode_bytearray_fate/1, decode_bytearray/2,
         spend/5, spend/10,
         sign_tx/2, sign_tx/3,
         sign_message/2, verify_signature/3,
         sign_binary/2, verify_bin_signature/3]).
         verify_signature/3]).


%%% Types

@@ -225,7 +224,7 @@
        NetworkID :: string(),
        Reason :: term().
%% @doc
%% Returns the network ID or the atom `none' if unavailable.
%% Returns the network ID or the atom `none' if it is unset.
%% Checking this is not normally necessary, but if network ID assignment is dynamic
%% in your system it may be necessary to call this before attempting to form
%% call data or perform other actions on chain that require a signature.
@@ -241,9 +240,7 @@ network_id() ->
%% @doc
%% Returns the list of currently assigned nodes.
%% The normal reason to call this is in preparation for altering the nodes list or
%% checking the current list in debugging. Note that the first node in the list is
%% the "sticky" node: the one that will be used for submitting transactions and
%% querying `next_nonce'.
%% checking the current list in debugging.

chain_nodes() ->
    hz_man:chain_nodes().

@@ -253,26 +250,19 @@ chain_nodes() ->
        when List :: [chain_node()],
             Reason :: {invalid, [term()]}.
%% @doc
%% Sets the chain nodes that will be queried whenever you communicate with the chain.
%% Sets the nodes that are intended to be used as your interface to the peer
%% network. The common situation is that your project runs a non-mining node as
%% part of your backend infrastructure. Typically one or two nodes is plenty, but
%% this may need to expand depending on how much query load your application generates.
%% The Hakuzaru manager will load balance by round-robin distribution.
%%
%% The common situation is that a project runs a non-mining node as part of the backend
%% infrastructure. Typically one or two nodes is plenty, but this may need to expand
%% depending on how much query load your application generates.
%%
%% There are two situations: one node, or multiple nodes.
%%
%% Single node:
%% In the case of a single node, everything passes through that one node. Duh.
%%
%% Multiple nodes:
%% In the case of multiple nodes a distinction is made between the node to which
%% transactions that update the chain state are made and to which `next_nonce' queries
%% are made, and nodes that are used for read-only queries. The node to which stateful
%% transactions are submitted is called the "sticky node". This is the first node
%% (head position) in the list of nodes submitted to the chain when `chain_nodes/1'
%% is called. If using multiple nodes but the sticky node should also be used for
%% read-only queries, submit the sticky node at the head of the list and again in
%% the tail.
%% NOTE: When load balancing in this way be aware that there can be race conditions
%% among the backend nodes with regard to a single account's current nonce when performing
%% contract calls in quick succession. Round robin distribution is extremely useful when
%% performing rapid lookups to the chain, but does not work well when submitting many
%% transactions to the chain from a single user in a short period of time. A future version
%% of this library will allow the caller to designate a single node as "sticky" to be used
%% exclusively in the case of nonce reads and TX submissions.

chain_nodes(List) when is_list(List) ->
    hz_man:chain_nodes(List).

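A hedged sketch (not part of the diff) of the node list layout the longer doc text above describes: the head entry is the "sticky" node used for stateful transactions and `next_nonce' queries, and repeating it at the tail lets it also take part in round-robin reads. The host and port values are made-up placeholders; nodes are {Host, Port} tuples as used by hz_fetcher elsewhere in this diff.

    %% Sticky node first, read-only nodes after it, sticky repeated at the tail.
    Sticky = {"10.0.0.10", 3013},
    ReadOnly = [{"10.0.0.11", 3013}, {"10.0.0.12", 3013}],
    hz:chain_nodes([Sticky | ReadOnly] ++ [Sticky]).
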
@@ -280,16 +270,7 @@ chain_nodes(List) when is_list(List) ->

-spec tls() -> boolean().
%% @doc
%% Check whether TLS is in use. The typical situation is to not use TLS as nodes that
%% serve as part of the backend of an application are typically run in the same
%% backend network as the application service. When accessing chain nodes over the WAN
%% however, TLS is strongly recommended to avoid a MITM attack.
%%
%% In this version of Hakuzaru TLS is either on or off for all nodes, making a mixed
%% infrastructure complicated to support without two Hakuzaru instances. This will
%% likely become a per-node setting in the future.
%%
%% TLS defaults to `false'.
%% Check whether TLS is in use.

tls() ->
    hz_man:tls().
@@ -299,8 +280,6 @@ tls() ->
%% @doc
%% Set TLS true or false. That's what a boolean is, by the way, `true' or `false'.
%% This is a condescending comment. That means I am talking down to you.
%%
%% TLS defaults to `false'.

tls(Boolean) ->
    hz_man:tls(Boolean).

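A hedged usage sketch (not part of the diff) for the tls/0 and tls/1 pair above, assuming the application is already running: TLS defaults to `false' and applies to all configured nodes at once, which is why the doc text recommends it mainly for nodes reached over the WAN.

    hz:tls(true),     % applied via a cast in hz_man
    true = hz:tls().  % subsequent requests use the HTTPS fetcher path
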
@@ -311,8 +290,6 @@ tls(Boolean) ->
        when Timeout :: pos_integer() | infinity.
%% @doc
%% Returns the current request timeout setting in milliseconds.
%% The default timeout is 5,000ms.
%% The max timeout is 120,000ms.

timeout() ->
    hz_man:timeout().
@@ -322,8 +299,6 @@ timeout() ->
        when MS :: pos_integer() | infinity.
%% @doc
%% Sets the request timeout in milliseconds.
%% The default timeout is 5,000ms.
%% The max timeout is 120,000ms.

timeout(MS) ->
    hz_man:timeout(MS).

@@ -600,18 +575,18 @@ acc_pending_txs(AccountID) ->
%% Retrieve the next nonce for the given account

next_nonce(AccountID) ->
    case request_sticky(["/v3/accounts/", AccountID, "/next-nonce"]) of
        {ok, #{"next_nonce" := Nonce}} -> {ok, Nonce};
        {ok, #{"reason" := "Account not found"}} -> {ok, 1};
        {ok, #{"reason" := Reason}} -> {error, Reason};
        Error -> Error
    end.
%   case request_sticky(["/v3/accounts/", AccountID]) of
%       {ok, #{"nonce" := Nonce}} -> {ok, Nonce + 1};
%   case request(["/v3/accounts/", AccountID, "/next-nonce"]) of
%       {ok, #{"next_nonce" := Nonce}} -> {ok, Nonce};
%       {ok, #{"reason" := "Account not found"}} -> {ok, 1};
%       {ok, #{"reason" := Reason}} -> {error, Reason};
%       Error -> Error
%   end.
    case request(["/v3/accounts/", AccountID]) of
        {ok, #{"nonce" := Nonce}} -> {ok, Nonce + 1};
        {ok, #{"reason" := "Account not found"}} -> {ok, 1};
        {ok, #{"reason" := Reason}} -> {error, Reason};
        Error -> Error
    end.

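A hedged sketch (not part of the diff), assuming next_nonce/1 is exported from hz: both versions of the function above return the next usable nonce, and an account the chain has not seen yet is treated as being at nonce 1. The account ID is reused from the test data elsewhere in this diff.

    {ok, Nonce} = hz:next_nonce("ak_2FTnrGfV8qsfHpaSEHpBrziioCpwwzLqSevHqfxQY3PaAAdARx"),
    true = is_integer(Nonce) andalso Nonce >= 1.
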
-spec dry_run(TX) -> {ok, Result} | {error, Reason}
@@ -753,7 +728,7 @@ tx_info(ID) ->

post_tx(Data) when is_binary(Data) ->
    JSON = zj:binary_encode(#{tx => Data}),
    request_sticky("/v3/transactions", JSON);
    request("/v3/transactions", JSON);
post_tx(Data) when is_list(Data) ->
    post_tx(list_to_binary(Data)).

@@ -865,14 +840,6 @@ status_chainends() ->
    request("/v3/status/chain-ends").


request_sticky(Path) ->
    hz_man:request_sticky(unicode:characters_to_list(Path)).


request_sticky(Path, Payload) ->
    hz_man:request_sticky(unicode:characters_to_list(Path), Payload).


request(Path) ->
    hz_man:request(unicode:characters_to_list(Path)).

@@ -922,7 +889,7 @@ contract_create(CreatorID, Path, InitArgs) ->
        when CreatorID :: pubkey(),
             Nonce :: pos_integer(),
             Amount :: non_neg_integer(),
             TTL :: non_neg_integer(),
             TTL :: pos_integer(),
             Gas :: pos_integer(),
             GasPrice :: pos_integer(),
             Path :: file:filename(),
@@ -1178,7 +1145,7 @@ read_aci(Path) ->
            try
                {ok, binary_to_term(Bin, [safe])}
            catch
                error:badarg -> {error, bad_aci}
                error:badarg -> error
            end;
        Error ->
            Error
@@ -1259,7 +1226,7 @@ contract_call(CallerID, Gas, AACI, ConID, Fun, Args) ->
             Gas :: pos_integer(),
             GasPrice :: pos_integer(),
             Amount :: non_neg_integer(),
             TTL :: non_neg_integer(),
             TTL :: pos_integer(),
             AACI :: aaci(),
             ConID :: unicode:chardata(),
             Fun :: string(),
@@ -1452,8 +1419,7 @@ prepare_aaci(ACI) ->
    % down to the concrete types they actually represent. We annotate each
    % subexpression of this concrete type with other info too, in case it helps
    % make error messages easier to understand.
    InternalTypeDefs = maps:merge(builtin_typedefs(), TypeDefs),
    Specs = annotate_function_specs(OpaqueSpecs, InternalTypeDefs, #{}),
    Specs = annotate_function_specs(OpaqueSpecs, TypeDefs, #{}),

    {aaci, Name, Specs, TypeDefs}.

@@ -1554,86 +1520,21 @@ opaque_type(Params, #{variant := VariantDefs}) ->
    {variant, Variants};
opaque_type(Params, #{tuple := TypeDefs}) ->
    {tuple, [opaque_type(Params, Type) || Type <- TypeDefs]};
opaque_type(_, #{bytes := Count}) ->
    {bytes, [Count]};
opaque_type(Params, Pair) when is_map(Pair) ->
    [{Name, TypeArgs}] = maps:to_list(Pair),
    {opaque_type_name(Name), [opaque_type(Params, Arg) || Arg <- TypeArgs]}.

% Atoms for any builtins that aren't qualified by a namespace in Sophia.
% Everything else stays as a string, user-defined or not.
% atoms for builtins, strings (lists) for user-defined types
opaque_type_name(<<"int">>) -> integer;
opaque_type_name(<<"bool">>) -> boolean;
opaque_type_name(<<"bits">>) -> bits;
opaque_type_name(<<"char">>) -> char;
opaque_type_name(<<"string">>) -> string;
opaque_type_name(<<"address">>) -> address;
opaque_type_name(<<"signature">>) -> signature;
opaque_type_name(<<"contract">>) -> contract;
opaque_type_name(<<"bool">>) -> boolean;
opaque_type_name(<<"option">>) -> option;
opaque_type_name(<<"list">>) -> list;
opaque_type_name(<<"map">>) -> map;
% I'm not sure how to produce channels in Sophia source, but they seem to exist
% in gmb still.
opaque_type_name(<<"channel">>) -> channel;
opaque_type_name(<<"string">>) -> string;
opaque_type_name(Name) -> binary_to_list(Name).

builtin_typedefs() ->
    #{"unit" => {[], {tuple, []}},
      "void" => {[], {variant, []}},
      "hash" => {[], {bytes, [32]}},
      "option" => {["'T"], {variant, [{"None", []},
                                      {"Some", [{var, "'T"}]}]}},
      "Chain.ttl" => {[], {variant, [{"FixedTTL", [integer]},
                                     {"RelativeTTL", [integer]}]}},
      "AENS.pointee" => {[], {variant, [{"AccountPt", [address]},
                                        {"OraclePt", [address]},
                                        {"ContractPt", [address]},
                                        {"ChannelPt", [address]}]}},
      "AENS.name" => {[], {variant, [{"Name", [address,
                                               "Chain.ttl",
                                               {map, [string, "AENS.pointee"]}]}]}},
      "AENSv2.pointee" => {[], {variant, [{"AccountPt", [address]},
                                          {"OraclePt", [address]},
                                          {"ContractPt", [address]},
                                          {"ChannelPt", [address]},
                                          {"DataPt", [{bytes, [any]}]}]}},
      "AENSv2.name" => {[], {variant, [{"Name", [address,
                                                 "Chain.ttl",
                                                 {map, [string, "AENSv2.pointee"]}]}]}},
      "Chain.ga_meta_tx" => {[], {variant, [{"GAMetaTx", [address, integer]}]}},
      "Chain.paying_for_tx" => {[], {variant, [{"PayingForTx", [address, integer]}]}},
      "Chain.base_tx" => {[], {variant, [{"SpendTx", [address, integer, string]},
                                         {"OracleRegisterTx", []},
                                         {"OracleQueryTx", []},
                                         {"OracleResponseTx", []},
                                         {"OracleExtendTx", []},
                                         {"NamePreclaimTx", []},
                                         {"NameClaimTx", ["hash"]},
                                         {"NameUpdateTx", [string]},
                                         {"NameRevokeTx", ["hash"]},
                                         {"NameTransferTx", [address, string]},
                                         {"ChannelCreateTx", [address]},
                                         {"ChannelDepositTx", [address, integer]},
                                         {"ChannelWithdrawTx", [address, integer]},
                                         {"ChannelForceProgressTx", [address]},
                                         {"ChannelCloseMutualTx", [address]},
                                         {"ChannelCloseSoloTx", [address]},
                                         {"ChannelSlashTx", [address]},
                                         {"ChannelSettleTx", [address]},
                                         {"ChannelSnapshotSoloTx", [address]},
                                         {"ContractCreateTx", [integer]},
                                         {"ContractCallTx", [address, integer]},
                                         {"GAAttachTx", []}]}},
      "Chain.tx" => {[], {record, [{"paying_for", {"option", ["Chain.paying_for_tx"]}},
                                   {"ga_metas", {list, ["Chain.ga_meta_tx"]}},
                                   {"actor", address},
                                   {"fee", integer},
                                   {"ttl", integer},
                                   {"tx", "Chain.base_tx"}]}},
      "MCL_BLS12_381.fr" => {[], {bytes, [32]}},
      "MCL_BLS12_381.fp" => {[], {bytes, [48]}}
     }.

% Type preparation has two goals. First, we need a data structure that can be
% traversed quickly, to take sophia-esque erlang expressions and turn them into
% fate-esque erlang expressions that gmbytecode can serialize. Second, we need

@@ -1675,10 +1576,6 @@ annotate_type(T, Types) ->
            Error
    end.

annotate_type2(T, _, _, unknown_type, _) ->
    % If a type is unknown, then it should not be reported as the normalized
    % name.
    {ok, {T, unknown_type, unknown_type}};
annotate_type2(T, AlreadyNormalized, NOpaque, NExpanded, Types) ->
    case annotate_type_subexpressions(NExpanded, Types) of
        {ok, Flat} ->
@@ -1700,10 +1597,6 @@ annotate_types([], _Types, Acc) ->

annotate_type_subexpressions(PrimitiveType, _Types) when is_atom(PrimitiveType) ->
    {ok, PrimitiveType};
annotate_type_subexpressions({bytes, [Count]}, _Types) ->
    % bytes is weird, because it has an argument, but that argument isn't an
    % opaque type.
    {ok, {bytes, [Count]}};
annotate_type_subexpressions({variant, VariantsOpaque}, Types) ->
    case annotate_variants(VariantsOpaque, Types, []) of
        {ok, Variants} -> {ok, {variant, Variants}};

@@ -1736,99 +1629,116 @@ annotate_variants([{Name, Elems} | Rest], Types, Acc) ->
annotate_variants([], _Types, Acc) ->
    {ok, lists:reverse(Acc)}.

% This function evaluates type aliases in a loop, until eventually a usable
% definition is found.
normalize_opaque_type(T, Types) -> normalize_opaque_type(T, Types, true).
normalize_opaque_type(T, Types) ->
    case type_is_expanded(T) of
        false -> normalize_opaque_type(T, Types, true);
        true -> {ok, true, T, T}
    end.

% FIXME detect infinite loops
% FIXME detect builtins with the wrong number of arguments
% FIXME should nullary types have an empty list of arguments added before now?
normalize_opaque_type(T, _Types, IsFirst) when is_atom(T) ->
    % Once we have eliminated the above rewrite cases, all other cases are
    % handled explicitly by the coerce logic, and so are considered normalized.
    {ok, IsFirst, T, T};
normalize_opaque_type(Type = {T, _}, _Types, IsFirst) when is_atom(T) ->
    % Once we have eliminated the above rewrite cases, all other cases are
    % handled explicitly by the coerce logic, and so are considered normalized.
    {ok, IsFirst, Type, Type};
normalize_opaque_type({option, [T]}, _Types, IsFirst) ->
    % Just like user-made ADTs, 'option' is considered part of the type, and so
    % options are considered normalised.
    {ok, IsFirst, {option, [T]}, {variant, [{"None", []}, {"Some", [T]}]}};
normalize_opaque_type(T, Types, IsFirst) when is_list(T) ->
    % Lists/strings indicate userspace types, which may require arg
    % substitutions. Convert to an explicit but empty arg list, for uniformity.
    normalize_opaque_type({T, []}, Types, IsFirst);
normalize_opaque_type({T, TypeArgs}, Types, IsFirst) when is_list(T) ->
    case maps:find(T, Types) of
        %{error, invalid_aci}; % FIXME more info
        error ->
            % We couldn't find this named type... Keep building the AACI, but
            % mark this type expression as unknown, so that FATE coercions
            % aren't attempted.
            {ok, IsFirst, {T, TypeArgs}, unknown_type};
            {ok, IsFirst, {T, TypeArgs}, {unknown_type, TypeArgs}};
        {ok, {TypeParamNames, Definition}} ->
            % We have a definition for this type, including names for whatever
            % args we have been given. Substitute our args into this.
            NewType = substitute_opaque_type(TypeParamNames, Definition, TypeArgs),
            % Now continue on to see if we need to restart the loop or not.
            normalize_opaque_type2(IsFirst, {T, TypeArgs}, NewType, Types)
            Bindings = lists:zip(TypeParamNames, TypeArgs),
            normalize_opaque_type2(T, TypeArgs, Types, IsFirst, Bindings, Definition)
    end.

normalize_opaque_type2(IsFirst, PrevType, NextType = {variant, _}, _) ->
    % We have reduced to a variant. Report the type name as the normalized
    % type, but also provide the variant definition itself as the candidate
    % flattened type for further annotation.
    {ok, IsFirst, PrevType, NextType};
normalize_opaque_type2(IsFirst, PrevType, NextType = {record, _}, _) ->
    % We have reduced to a record. Report the type name as the normalized
    % type, but also provide the record definition itself as the candidate
    % flattened type for further annotation.
    {ok, IsFirst, PrevType, NextType};
normalize_opaque_type2(_, _, NextType, Types) ->
    % Not a variant or record yet, so go back to the start of the loop.
    % It will no longer be the first iteration.
    normalize_opaque_type(NextType, Types, false).
normalize_opaque_type2(T, TypeArgs, Types, IsFirst, Bindings, Definition) ->
    SubResult =
        case Bindings of
            [] -> {ok, Definition};
            _ -> substitute_opaque_type(Bindings, Definition)
        end,
    case SubResult of
        % Type names were already normalized if they were ADTs or records,
        % since for those connectives the name is considered part of the type.
        {ok, NextT = {variant, _}} ->
            {ok, IsFirst, {T, TypeArgs}, NextT};
        {ok, NextT = {record, _}} ->
            {ok, IsFirst, {T, TypeArgs}, NextT};
        % Everything else has to be substituted down to a built-in connective
        % to be considered normalized.
        {ok, NextT} ->
            normalize_opaque_type3(NextT, Types);
        Error ->
            Error
    end.

% Perform a beta-reduction on a type expression.
substitute_opaque_type([], Definition, _) ->
    % There are no parameters to substitute. This is the simplest way of
    % defining type aliases, records, and variants, so we should make sure to
    % short circuit all the recursive descent logic, since it won't actually
    % do anything.
    Definition;
substitute_opaque_type(TypeParamNames, Definition, TypeArgs) ->
    % Bundle the param names alongside the args that we want to substitute, so
    % that we can keyfind the one list.
    Bindings = lists:zip(TypeParamNames, TypeArgs),
    substitute_opaque_type(Bindings, Definition).
% while this does look like normalize_opaque_type/2, it sets IsFirst to false
% instead of true, and is part of the loop, instead of being an initial
% condition for the loop.
normalize_opaque_type3(NextT, Types) ->
    case type_is_expanded(NextT) of
        false -> normalize_opaque_type(NextT, Types, false);
        true -> {ok, false, NextT, NextT}
    end.

% Strings indicate names that should be substituted. Atoms indicate built in
% types, which don't need to be expanded, except for option.
type_is_expanded({option, _}) -> false;
type_is_expanded(X) when is_atom(X) -> true;
type_is_expanded({X, _}) when is_atom(X) -> true;
type_is_expanded(_) -> false.

% Skip traversal if there is nothing to substitute. This will often be the
% most common case.
substitute_opaque_type(Bindings, {var, VarName}) ->
    case lists:keyfind(VarName, 1, Bindings) of
        {_, TypeArg} -> TypeArg;
        % No valid ACI will create this case. Regardless, the user should
        % still be able to specify arbitrary gmb FATE terms for whatever this
        % is meant to be.
        false -> unknown_type
        false -> {error, invalid_aci};
        {_, TypeArg} -> {ok, TypeArg}
    end;
substitute_opaque_type(Bindings, {variant, Args}) ->
    case substitute_variant_types(Bindings, Args, []) of
        {ok, Result} -> {ok, {variant, Result}};
        Error -> Error
    end;
substitute_opaque_type(Bindings, {record, Args}) ->
    case substitute_record_types(Bindings, Args, []) of
        {ok, Result} -> {ok, {record, Result}};
        Error -> Error
    end;
substitute_opaque_type(Bindings, {variant, Variants}) ->
    Each = fun({VariantName, Elements}) ->
                   NewElements = substitute_opaque_types(Bindings, Elements),
                   {VariantName, NewElements}
           end,
    NewVariants = lists:map(Each, Variants),
    {variant, NewVariants};
substitute_opaque_type(Bindings, {record, Fields}) ->
    Each = fun({FieldName, FieldType}) ->
                   NewType = substitute_opaque_type(Bindings, FieldType),
                   {FieldName, NewType}
           end,
    NewFields = lists:map(Each, Fields),
    {record, NewFields};
substitute_opaque_type(Bindings, {Connective, Args}) ->
    NewArgs = substitute_opaque_types(Bindings, Args),
    {Connective, NewArgs};
    case substitute_opaque_types(Bindings, Args, []) of
        {ok, Result} -> {ok, {Connective, Result}};
        Error -> Error
    end;
substitute_opaque_type(_Bindings, Type) ->
    Type.
    {ok, Type}.

substitute_opaque_types(Bindings, Types) ->
    Each = fun(Type) -> substitute_opaque_type(Bindings, Type) end,
    lists:map(Each, Types).
substitute_variant_types(Bindings, [{VariantName, Elements} | Rest], Acc) ->
    case substitute_opaque_types(Bindings, Elements, []) of
        {ok, Result} -> substitute_variant_types(Bindings, Rest, [{VariantName, Result} | Acc]);
        Error -> Error
    end;
substitute_variant_types(_Bindings, [], Acc) ->
    {ok, lists:reverse(Acc)}.

substitute_record_types(Bindings, [{ElementName, Type} | Rest], Acc) ->
    case substitute_opaque_type(Bindings, Type) of
        {ok, Result} -> substitute_record_types(Bindings, Rest, [{ElementName, Result} | Acc]);
        Error -> Error
    end;
substitute_record_types(_Bindings, [], Acc) ->
    {ok, lists:reverse(Acc)}.

substitute_opaque_types(Bindings, [Next | Rest], Acc) ->
    case substitute_opaque_type(Bindings, Next) of
        {ok, Result} -> substitute_opaque_types(Bindings, Rest, [Result | Acc]);
        Error -> Error
    end;
substitute_opaque_types(_Bindings, [], Acc) ->
    {ok, lists:reverse(Acc)}.

coerce_bindings(VarTypes, Terms, Direction) ->
    DefLength = length(VarTypes),

@@ -1878,39 +1788,33 @@ coerce({O, N, integer}, S, to_fate) when is_list(S) ->
        error:badarg -> single_error({invalid, O, N, S})
    end;
coerce({O, N, address}, S, to_fate) ->
    coerce_chain_object(O, N, address, account_pubkey, S);
    try
        case gmser_api_encoder:decode(unicode:characters_to_binary(S)) of
            {account_pubkey, Key} -> {ok, {address, Key}};
            _ -> single_error({invalid, O, N, S})
        end
    catch
        error:_ -> single_error({invalid, O, N, S})
    end;
coerce({_, _, address}, {address, Bin}, from_fate) ->
    Address = gmser_api_encoder:encode(account_pubkey, Bin),
    {ok, unicode:characters_to_list(Address)};
coerce({O, N, contract}, S, to_fate) ->
    coerce_chain_object(O, N, contract, contract_pubkey, S);
    try
        case gmser_api_encoder:decode(unicode:characters_to_binary(S)) of
            {contract_pubkey, Key} -> {ok, {contract, Key}};
            _ -> single_error({invalid, O, N, S})
        end
    catch
        error:_ -> single_error({invalid, O, N, S})
    end;
coerce({_, _, contract}, {contract, Bin}, from_fate) ->
    Address = gmser_api_encoder:encode(contract_pubkey, Bin),
    {ok, unicode:characters_to_list(Address)};
coerce({_, _, signature}, S, to_fate) when is_binary(S) andalso (byte_size(S) =:= 64) ->
    % Usually to pass a binary in, you need to wrap it as {raw, Binary}, but
    % since sg_... strings OR hex blobs can be used as signatures in Sophia, we
    % special case this case based on the length. Even if a binary starts with
    % "sg_", 64 characters is not enough to represent a 64 byte signature, so
    % the most optimistic interpretation is to use the binary directly.
    {ok, S};
coerce({O, N, signature}, S, to_fate) ->
    coerce_chain_object(O, N, signature, signature, S);
coerce({_, _, signature}, Bin, from_fate) ->
    Address = gmser_api_encoder:encode(signature, Bin),
    {ok, unicode:characters_to_list(Address)};
%coerce({_, _, channel}, S, to_fate) when is_binary(S) ->
%    {ok, {channel, S}};
%coerce({_, _, channel}, {channel, S}, from_fate) when is_binary(S) ->
%    {ok, S};
coerce({_, _, boolean}, true, _) ->
    {ok, true};
coerce({_, _, boolean}, "true", _) ->
    {ok, true};
coerce({_, _, boolean}, false, _) ->
    {ok, false};
coerce({_, _, boolean}, "false", _) ->
    {ok, false};
coerce({O, N, boolean}, S, _) ->
    single_error({invalid, O, N, S});
coerce({O, N, string}, Str, Direction) ->

@@ -1926,30 +1830,6 @@ coerce({O, N, string}, Str, Direction) ->
        StrBin ->
            {ok, StrBin}
    end;
coerce({_, _, char}, Val, _Direction) when is_integer(Val) ->
    {ok, Val};
coerce({O, N, char}, Str, to_fate) ->
    Result = unicode:characters_to_list(Str),
    case Result of
        {error, _, _} ->
            single_error({invalid, O, N, Str});
        {incomplete, _, _} ->
            single_error({invalid, O, N, Str});
        [C] ->
            {ok, C};
        _ ->
            single_error({invalid, O, N, Str})
    end;
coerce({O, N, {bytes, [Count]}}, Bytes, _Direction) when is_bitstring(Bytes) ->
    coerce_bytes(O, N, Count, Bytes);
coerce({_, _, bits}, {bits, Num}, from_fate) ->
    {ok, Num};
coerce({_, _, bits}, Num, to_fate) when is_integer(Num) ->
    {ok, {bits, Num}};
coerce({_, _, bits}, Bits, to_fate) when is_bitstring(Bits) ->
    Size = bit_size(Bits),
    <<IntValue:Size>> = Bits,
    {ok, {bits, IntValue}};
coerce({_, _, {list, [Type]}}, Data, Direction) when is_list(Data) ->
    coerce_list(Type, Data, Direction);
coerce({_, _, {map, [KeyType, ValType]}}, Data, Direction) when is_map(Data) ->
@@ -1999,38 +1879,6 @@ coerce({O, N, _}, Data, from_fate) ->
    {ok, Data};
coerce({O, N, _}, Data, _) -> single_error({invalid, O, N, Data}).

coerce_bytes(O, N, _, Bytes) when bit_size(Bytes) rem 8 /= 0 ->
    single_error({partial_bytes, O, N, bit_size(Bytes)});
coerce_bytes(_, _, any, Bytes) ->
    {ok, Bytes};
coerce_bytes(O, N, Count, Bytes) when byte_size(Bytes) /= Count ->
    single_error({incorrect_size, O, N, Bytes});
coerce_bytes(_, _, _, Bytes) ->
    {ok, Bytes}.

coerce_chain_object(_, _, _, _, {raw, Binary}) ->
    {ok, Binary};
coerce_chain_object(O, N, T, Tag, S) ->
    case decode_chain_object(Tag, S) of
        {ok, Data} -> {ok, coerce_chain_object2(T, Data)};
        {error, Reason} -> single_error({Reason, O, N, S})
    end.

coerce_chain_object2(address, Data) -> {address, Data};
coerce_chain_object2(contract, Data) -> {contract, Data};
coerce_chain_object2(signature, Data) -> Data.

decode_chain_object(Tag, S) ->
    try
        case gmser_api_encoder:decode(unicode:characters_to_binary(S)) of
            {Tag, Data} -> {ok, Data};
            {_, _} -> {error, wrong_prefix}
        end
    catch
        error:missing_prefix -> {error, missing_prefix};
        error:incorrect_size -> {error, incorrect_size}
    end.

coerce_list(Type, Elements, Direction) ->
    % 0 index since it represents a sophia list
    coerce_list(Type, Elements, Direction, 0, [], []).

@@ -2425,22 +2273,8 @@ spend3(DSenderID,
    hz:post_tx(Encoded).


-spec sign_message(Message, SecKey) -> Sig
        when Message :: binary(),
             SecKey :: binary(),
             Sig :: binary().

sign_message(Message, SecKey) ->
    Prefix = message_sig_prefix(),
    {ok, PSize} = vencode(byte_size(Prefix)),
    {ok, MSize} = vencode(byte_size(Message)),
    Smashed = iolist_to_binary([PSize, Prefix, MSize, Message]),
    {ok, Hashed} = eblake2:blake2b(32, Smashed),
    ecu_eddsa:sign_detached(Hashed, SecKey).


-spec verify_signature(Sig, Message, PubKey) -> Result
        when Sig :: string(), % base64 encoded signature,
        when Sig :: binary(),
             Message :: iodata(),
             PubKey :: pubkey(),
             Result :: {ok, Outcome :: boolean()}
@@ -2465,7 +2299,7 @@ verify_signature2(Sig, Message, PK) ->
    % the user from accidentally signing a transaction disguised as a message.
    %
    % Salt the message then hash with blake2b.
    Prefix = message_sig_prefix(),
    Prefix = <<"Gajumaru Signed Message:\n">>,
    {ok, PSize} = vencode(byte_size(Prefix)),
    {ok, MSize} = vencode(byte_size(Message)),
    Smashed = iolist_to_binary([PSize, Prefix, MSize, Message]),
@@ -2475,7 +2309,6 @@ verify_signature2(Sig, Message, PK) ->
    Result = ecu_eddsa:sign_verify_detached(Signature, Hashed, PK),
    {ok, Result}.

message_sig_prefix() -> <<"Gajumaru Signed Message:\n">>.

% This is Bitcoin's variable-length unsigned integer encoding
% See: https://en.bitcoin.it/wiki/Protocol_documentation#Variable_length_integer
@@ -2503,42 +2336,6 @@ eu(N, Size) ->
    <<Bytes/binary, ExtraZeros/binary>>.


-spec sign_binary(Binary, SecKey) -> Sig
        when Binary :: binary(),
             SecKey :: binary(),
             Sig :: binary().

sign_binary(Binary, SecKey) ->
    Prefix = binary_sig_prefix(),
    Target = <<Prefix/binary, Binary/binary>>,
    {ok, Hash} = eblake2:blake2b(32, Target),
    ecu_eddsa:sign_detached(Hash, SecKey).


-spec verify_bin_signature(Sig, Binary, PubKey) -> Result
        when Sig :: string(), % base64 encoded signature,
             Binary :: binary(),
             PubKey :: pubkey(),
             Result :: {ok, Outcome :: boolean()}
                     | {error, Reason :: term()}.

verify_bin_signature(Sig, Binary, PubKey) ->
    case gmser_api_encoder:decode(PubKey) of
        {account_pubkey, PK} -> verify_bin_signature2(Sig, Binary, PK);
        Other -> {error, {bad_key, Other}}
    end.

verify_bin_signature2(Sig, Binary, PK) ->
    Prefix = binary_sig_prefix(),
    Target = <<Prefix/binary, Binary/binary>>,
    {ok, Hash} = eblake2:blake2b(32, Target),
    Signature = base64:decode(Sig),
    Result = ecu_eddsa:sign_verify_detached(Signature, Hash, PK),
    {ok, Result}.

binary_sig_prefix() -> <<"Gajumaru Signed Binary:">>.

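A hedged round-trip sketch (not part of the diff) for the sign_binary/2 and verify_bin_signature/3 pair shown above, on the side of the diff that exports them. The #{public, secret} keypair shape and the base64 step are assumptions taken from ecu_eddsa and from the "base64 encoded signature" note in the -spec; they are not confirmed by this diff.

    #{public := Pub, secret := Sec} = ecu_eddsa:sign_keypair(),
    ID = gmser_api_encoder:encode(account_pubkey, Pub),
    Sig = hz:sign_binary(<<"hello">>, Sec),
    {ok, _IsValid} = hz:verify_bin_signature(base64:encode(Sig), <<"hello">>, ID).
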
%%% Debug functionality

% debug_network() ->
@@ -2598,8 +2395,6 @@ try_coerce(Type, Sophia, Fate) ->
        _ ->
            erlang:error({from_fate_failed, Sophia, SophiaActual})
    end,
    % Finally, check that the FATE result is something that gmb understands.
    gmb_fate_encoding:serialize(Fate),
    ok.

coerce_int_test() ->
@@ -2622,25 +2417,6 @@ coerce_contract_test() ->
                        167,208,53,78,40,235,2,163,132,36,47,183,228,151,9,
                        210,39,214>>}).

coerce_signature_test() ->
    {ok, Type} = annotate_type(signature, #{}),
    try_coerce(Type,
               "sg_XDyF8LJC4tpMyAySvpaG1f5V9F2XxAbRx9iuVjvvdNMwVracLhzAuXhRM5kXAFtpwW1DCHuz5jGehUayCah4jub32Ti2n",
               <<231,4,97,129,16,173,37,42,194,249,28,94,134,163,208,84,22,135,
                 169,85,212,142,14,12,233,252,97,50,193,158,229,51,123,206,222,
                 249,2,3,85,173,106,150,243,253,89,128,248,52,195,140,95,114,
                 233,110,119,143,206,137,124,36,63,154,85,7>>).

coerce_signature_binary_test() ->
    {ok, Type} = annotate_type(signature, #{}),
    Binary = <<231,4,97,129,16,173,37,42,194,249,28,94,134,163,208,84,22,135,
               169,85,212,142,14,12,233,252,97,50,193,158,229,51,123,206,222,
               249,2,3,85,173,106,150,243,253,89,128,248,52,195,140,95,114,
               233,110,119,143,206,137,124,36,63,154,85,7>>,
    {ok, Binary} = coerce(Type, {raw, Binary}, to_fate),
    {ok, Binary} = coerce(Type, Binary, to_fate),
    ok.

coerce_bool_test() ->
    {ok, Type} = annotate_type(boolean, #{}),
    try_coerce(Type, true, true),
@@ -2669,40 +2445,10 @@ coerce_variant_test() ->
    try_coerce(Type, {"A", 123}, {variant, [1, 2], 0, {123}}),
    try_coerce(Type, {"B", 456, 789}, {variant, [1, 2], 1, {456, 789}}).

coerce_option_test() ->
    {ok, Type} = annotate_type({"option", [integer]}, builtin_typedefs()),
    try_coerce(Type, {"None"}, {variant, [0, 1], 0, {}}),
    try_coerce(Type, {"Some", 1}, {variant, [0, 1], 1, {1}}).

coerce_record_test() ->
    {ok, Type} = annotate_type({record, [{"a", integer}, {"b", integer}]}, #{}),
    try_coerce(Type, #{"a" => 123, "b" => 456}, {tuple, {123, 456}}).

coerce_bytes_test() ->
    {ok, Type} = annotate_type({tuple, [{bytes, [4]}, {bytes, [any]}]}, #{}),
    try_coerce(Type, {<<"abcd">>, <<"efghi">>}, {tuple, {<<"abcd">>, <<"efghi">>}}).

coerce_bits_test() ->
    {ok, Type} = annotate_type(bits, #{}),
    try_coerce(Type, 5, {bits, 5}).

coerce_char_test() ->
    {ok, Type} = annotate_type(char, #{}),
    try_coerce(Type, $?, $?).

coerce_unicode_test() ->
    {ok, Type} = annotate_type(char, #{}),
    % Latin Small Letter C with cedilla and acute
    {ok, $ḉ} = coerce(Type, <<"ḉ"/utf8>>, to_fate),
    ok.

coerce_hash_test() ->
    {ok, Type} = annotate_type("hash", builtin_typedefs()),
    Hash = list_to_binary(lists:seq(1,32)),
    try_coerce(Type, Hash, Hash),
    ok.


%%% Complex AACI parameter and namespace tests

@@ -2789,95 +2535,3 @@ param_test() ->
    try_coerce(Input, 0, 0),
    try_coerce(Output, 0, 0).

%%% Obscure Sophia types where we should check the AACI as well

obscure_aaci_test() ->
    Contract = "
        include \"Set.aes\"
        contract C =
            entrypoint options(): option(int) = None
            entrypoint fixed_bytes(): bytes(4) = #DEADBEEF
            entrypoint any_bytes(): bytes() = Bytes.to_any_size(#112233)
            entrypoint bits(): bits = Bits.all
            entrypoint character(): char = 'a'
            entrypoint hash(): hash = #00112233445566778899AABBCCDDEEFF00112233445566778899AABBCCDDEEFF
            entrypoint unit(): unit = ()

            entrypoint ttl(x): Chain.ttl = FixedTTL(x)
            entrypoint paying_for(x, y): Chain.paying_for_tx = Chain.PayingForTx(x, y)
            entrypoint ga_meta_tx(x, y): Chain.ga_meta_tx = Chain.GAMetaTx(x, y)
            entrypoint base_tx(x, y, z): Chain.base_tx = Chain.SpendTx(x, y, z)
            entrypoint tx(a, b, c, d, e, f): Chain.tx =
                {paying_for = a,
                 ga_metas = b,
                 actor = c,
                 fee = d,
                 ttl = e,
                 tx = f}

            entrypoint pointee(x): AENS.pointee = AENS.AccountPt(x)
            entrypoint name(x, y, z): AENS.name = AENS.Name(x, y, z)
            entrypoint pointee2(x): AENSv2.pointee = AENSv2.DataPt(x)
            entrypoint name2(x, y, z): AENSv2.name = AENSv2.Name(x, y, z)

            entrypoint fr(x): MCL_BLS12_381.fr = x
            entrypoint fp(x): MCL_BLS12_381.fp = x

            entrypoint set(): Set.set(int) = Set.new()

        ",
    {ok, AACI} = aaci_from_string(Contract),

    {ok, {[], {{bytes, [4]}, _, _}}} = aaci_lookup_spec(AACI, "fixed_bytes"),
    {ok, {[], {{bytes, [any]}, _, _}}} = aaci_lookup_spec(AACI, "any_bytes"),
    {ok, {[], {bits, _, _}}} = aaci_lookup_spec(AACI, "bits"),
    {ok, {[], {char, _, _}}} = aaci_lookup_spec(AACI, "character"),

    {ok, {[], {{"option", [integer]}, _, {variant, [{"None", []}, {"Some", [_]}]}}}} = aaci_lookup_spec(AACI, "options"),
    {ok, {[], {"hash", _, {bytes, [32]}}}} = aaci_lookup_spec(AACI, "hash"),
    {ok, {[], {"unit", _, {tuple, []}}}} = aaci_lookup_spec(AACI, "unit"),

    {ok, {_, {"Chain.ttl", _, {variant, _}}}} = aaci_lookup_spec(AACI, "ttl"),
    {ok, {_, {"Chain.paying_for_tx", _, {variant, _}}}} = aaci_lookup_spec(AACI, "paying_for"),
    {ok, {_, {"Chain.ga_meta_tx", _, {variant, _}}}} = aaci_lookup_spec(AACI, "ga_meta_tx"),
    {ok, {_, {"Chain.base_tx", _, {variant, _}}}} = aaci_lookup_spec(AACI, "base_tx"),
    {ok, {_, {"Chain.tx", _, {record, _}}}} = aaci_lookup_spec(AACI, "tx"),

    {ok, {_, {"AENS.pointee", _, {variant, _}}}} = aaci_lookup_spec(AACI, "pointee"),
    {ok, {_, {"AENS.name", _, {variant, _}}}} = aaci_lookup_spec(AACI, "name"),
    {ok, {_, {"AENSv2.pointee", _, {variant, _}}}} = aaci_lookup_spec(AACI, "pointee2"),
    {ok, {_, {"AENSv2.name", _, {variant, _}}}} = aaci_lookup_spec(AACI, "name2"),

    {ok, {_, {"MCL_BLS12_381.fr", _, {bytes, [32]}}}} = aaci_lookup_spec(AACI, "fr"),
    {ok, {_, {"MCL_BLS12_381.fp", _, {bytes, [48]}}}} = aaci_lookup_spec(AACI, "fp"),

    {ok, {[], {{"Set.set", [integer]}, _, {record, [{"to_map", _}]}}}} = aaci_lookup_spec(AACI, "set"),

    ok.

name_coerce_test() ->
    AddrSoph = "ak_2FTnrGfV8qsfHpaSEHpBrziioCpwwzLqSevHqfxQY3PaAAdARx",
    AddrFate = {address, <<164,136,155,90,124,22,40,206,255,76,213,56,238,123,
                           167,208,53,78,40,235,2,163,132,36,47,183,228,151,9,
                           210,39,214>>},
    {ok, TTL} = annotate_type("Chain.ttl", builtin_typedefs()),
    TTLSoph = {"FixedTTL", 0},
    TTLFate = {variant, [1, 1], 0, {0}},
    try_coerce(TTL, TTLSoph, TTLFate),
    {ok, Pointee} = annotate_type("AENS.pointee", builtin_typedefs()),
    PointeeSoph = {"AccountPt", AddrSoph},
    PointeeFate = {variant, [1, 1, 1, 1], 0, {AddrFate}},
    try_coerce(Pointee, PointeeSoph, PointeeFate),
    {ok, Name} = annotate_type("AENS.name", builtin_typedefs()),
    NameSoph = {"Name", AddrSoph, TTLSoph, #{"myname" => PointeeSoph}},
    NameFate = {variant, [3], 0, {AddrFate, TTLFate, #{<<"myname">> => PointeeFate}}},
    try_coerce(Name, NameSoph, NameFate).

void_coerce_test() ->
    % Void itself can't be represented, but other types built out of void are
    % valid.
    {ok, NonOption} = annotate_type({"option", ["void"]}, builtin_typedefs()),
    try_coerce(NonOption, {"None"}, {variant, [0, 1], 0, {}}),
    {ok, NonList} = annotate_type({list, ["void"]}, builtin_typedefs()),
    try_coerce(NonList, [], []).

@@ -1,10 +1,10 @@
-module(hz_fetcher).
-vsn("0.7.0").
-vsn("0.5.1").
-author("Craig Everett <ceverett@tsuriai.jp>").
-copyright("Craig Everett <ceverett@tsuriai.jp>").
-license("MIT").

-export([connect/4, connect_slowly/4]).
-export([connect/4, slowly_connect/4]).


connect(Node = {Host, Port}, Request, From, Timeout) ->
@@ -206,7 +206,7 @@ read_hval(_, Received, _, _, _) ->
    {error, headers}.


connect_slowly(Node, {get, Path}, From, Timeout) ->
slowly_connect(Node, {get, Path}, From, Timeout) ->
    HttpOptions = [{connect_timeout, 3000}, {timeout, Timeout}],
    URL = lists:flatten(url(Node, Path)),
    Request = {URL, []},
@@ -217,7 +217,7 @@ connect_slowly(Node, {get, Path}, From, Timeout) ->
        BAD -> {error, BAD}
    end,
    gen_server:reply(From, Result);
connect_slowly(Node, {post, Path, Payload}, From, Timeout) ->
slowly_connect(Node, {post, Path, Payload}, From, Timeout) ->
    HttpOptions = [{connect_timeout, 3000}, {timeout, Timeout}],
    URL = lists:flatten(url(Node, Path)),
    Request = {URL, [], "application/json", Payload},
@@ -236,6 +236,13 @@ url({Node, Port}, Path) when is_tuple(Node) ->
    ["https://", inet:ntoa(Node), ":", integer_to_list(Port), Path].


log(Level, Format, Args) ->
    Raw = io_lib:format("~w ~w: " ++ Format, [?MODULE, self() | Args]),
    Entry = unicode:characters_to_list(Raw),
    logger:log(Level, Entry).


disconnect(Socket) ->
    case peername(Socket) of
        {ok, {Addr, Port}} ->
@@ -262,6 +269,7 @@ disconnect(Socket, Host, Port) ->
        end
    end.


peername(Socket) ->
    case inet:peername(Socket) of
        {ok, {{0, 0, 0, 0, 0, 65535, X, Y}, Port}} ->
@@ -270,9 +278,3 @@ peername(Socket) ->
        Other ->
            Other
    end.


log(Level, Format, Args) ->
    Raw = io_lib:format("~w ~w: " ++ Format, [?MODULE, self() | Args]),
    Entry = unicode:characters_to_list(Raw),
    logger:log(Level, Entry).

src/hz_grids.erl (159 changed lines)
@@ -1,159 +0,0 @@
%%% @doc
%%% GRIDS URL parsing
%%%
%%% GRID(S): Gajumaru Remote Instruction Dispatch (Serialization)
%%% GRIDS is a Gajumaru protocol for encoding wallet instructions as URLs.
%%% Version 1 of the protocol consists of two verbs with two contexts each, collapsed to
%%% four symbols for brevity.
%%%
%%% The GRIDS schema begins with "grids://" or "grid://"
%%% Which way this is interpreted can vary depending on the verb.
%%%
%%% The typical "host" component is either an actual hostname or address and an optional
%%% port number (the default port being 3013), or a Gajumaru chain network ID (in which
%%% case the port number is ignored if provided). Which way this field is interpreted
%%% depends on the verb.
%%%
%%% The first element of the path after the host component indicates the protocol version.
%%% Only version 1 exists at the time of this release.
%%%
%%% The next element of the path after the version is a single letter that indicates which
%%% action to take. The following actions are available:
%%% "s": Spend on Chain
%%%     Constructs a spend transaction to the address indicated in the path component
%%%     indicated in the final path element. Two qargs are valid in the trailing arguments
%%%     section: "a" for amount (in Pucks, not Gajus!), and "p" for data payload.
%%%     In this context the "host" field in the URL is interpreted as a chain network ID.
%%% "t": Transfer (spend) on Host
%%%     The same as "spend" above, but in this context the host field of the URL is
%%%     interpreted as host[:port] information and the network chain ID that will be used
%%%     will be derived from whatever chain the given host reports.
%%% "d": Dead-drop signature request
%%%     This instructs the wallet to retrieve a signature data blob from an HTTP or HTTPS
%%%     URL that can be reconstructed by replacing "grids" with "https" or "grid" with
%%%     "http", omitting the "/1/d" path component and then reconstructing the URL.
%%%     This provides a lightweight method for services to enable contract calls from
%%%     wallets that are not capable of compiling contract source.
%%% @end

-module(hz_grids).
-vsn("0.7.0").
-export([url/2, parse/1, req/2, req/3]).


-spec url(Instruction, HTTP) -> Result
        when Instruction :: spend | transfer | sign,
             HTTP :: uri_string:uri_string(),
             GRIDS :: uri_string:uri_string(),
             Result :: {ok, GRIDS} | uri_string:uri_error().
%% @doc
%% Takes

url(Instruction, HTTP) ->
    case uri_string:parse(HTTP) of
        U = #{scheme := "https"} -> url2(Instruction, U#{scheme := "grids"});
        U = #{scheme := "http"} -> url2(Instruction, U#{scheme := "grid"});
        Error -> Error
    end.

url2(Instruction, URL = #{path := Path}) ->
    GRIDS =
        case Instruction of
            spend -> URL#{path := "/1/s" ++ Path};
            transfer -> URL#{path := "/1/t" ++ Path};
            sign -> URL#{path := "/1/d" ++ Path}
        end,
    {ok, uri_string:recompose(GRIDS)}.


-spec parse(GRIDS) -> Result
        when GRIDS :: string(),
             Result :: {ok, Instruction} | uri_string:error(),
             Instruction :: {{spend, chain | node}, {Location, Recipient, Amount, Payload}}
                          | {{sign, http | https}, URL},
             Location :: Node :: {inet:ip_address() | inet:hostname(), inet:port_number()}
                       | Chain :: binary(),
             Recipient :: gajudesk:id(),
             Amount :: non_neg_integer(),
             Payload :: binary(),
             URL :: string().

parse(GRIDS) ->
    case uri_string:parse(GRIDS) of
        #{path := "/1/s/" ++ R, host := H, query := Q, scheme := "grids"} ->
            spend(R, chain, list_to_binary(H), Q);
        #{path := "/1/s/" ++ R, host := H, query := Q, scheme := "grid"} ->
            spend(R, chain, list_to_binary(H), Q);
        #{path := "/1/t/" ++ R, host := H, port := P, query := Q, scheme := "grids"} ->
            spend(R, node, {H, P}, Q);
        #{path := "/1/t/" ++ R, host := H, port := P, query := Q, scheme := "grid"} ->
            spend(R, node, {H, P}, Q);
        #{path := "/1/t/" ++ R, host := H, query := Q, scheme := "grids"} ->
            spend(R, node, {H, 3013}, Q);
        #{path := "/1/t/" ++ R, host := H, query := Q, scheme := "grid"} ->
            spend(R, node, {H, 3013}, Q);
        U = #{path := "/1/d/" ++ L, scheme := "grids"} ->
            HTTP = uri_string:recompose(U#{scheme := "https", path := L}),
            {ok, {{sign, https}, HTTP}};
        U = #{path := "/1/d/" ++ L, scheme := "grid"} ->
            HTTP = uri_string:recompose(U#{scheme := "http", path := L}),
            {ok, {{sign, http}, HTTP}};
        Error ->
            Error
    end.

spend(Recipient, Context, Location, Qwargs) ->
    case dissect_query(Qwargs) of
        {ok, Amount, Payload} ->
            {ok, {{spend, Context}, {Location, Recipient, Amount, Payload}}};
        Error ->
            Error
    end.


dissect_query(Qwargs) ->
    case uri_string:dissect_query(Qwargs) of
        {error, Reason, Info} ->
            {error, Reason, Info};
        ArgList ->
            case l_to_i(proplists:get_value("a", ArgList, "0")) of
                {ok, Amount} ->
                    Payload = list_to_binary(proplists:get_value("p", ArgList, "")),
                    {ok, Amount, Payload};
                Error ->
                    Error
            end
    end.

l_to_i(S) ->
    try
        {ok, list_to_integer(S)}
    catch
        error:badarg -> {error, bad_url}
    end.


req(Type, Message) ->
    req(Type, Message, false).

req(sign, Message, ID) ->
    #{"grids" => 1,
      "chain" => "gajumaru",
      "network_id" => hz:network_id(),
      "type" => "message",
      "public_id" => ID,
      "payload" => Message};
req(tx, Data, ID) ->
    #{"grids" => 1,
      "chain" => "gajumaru",
      "network_id" => hz:network_id(),
      "type" => "tx",
      "public_id" => ID,
      "payload" => Data};
req(ack, Message, ID) ->
    #{"grids" => 1,
      "chain" => "gajumaru",
      "network_id" => hz:network_id(),
      "type" => "ack",
      "public_id" => ID,
      "payload" => Message}.

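A hedged walk-through (not part of the diff) of the parse/1 clauses above, on the 0.7.0 side where hz_grids.erl exists. The network ID and query values are made-up placeholders; the account address is reused from the test data elsewhere in this diff.

    Addr = "ak_2FTnrGfV8qsfHpaSEHpBrziioCpwwzLqSevHqfxQY3PaAAdARx",
    URL = "grids://groot.example/1/s/" ++ Addr ++ "?a=1000000&p=invoice-42",
    %% "s" spends interpret the host part as a chain network ID, so the
    %% location comes back as a binary rather than a {Host, Port} node.
    {ok, {{spend, chain}, {<<"groot.example">>, Addr, 1000000, <<"invoice-42">>}}} =
        hz_grids:parse(URL).
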
@@ -1,153 +0,0 @@
%%% @doc
%%% Key functions
%%%
%%% The main reason this is a module of its own is that in the original architecture
%%% it was a process rather than just a library of functions. Now that it exists, though,
%%% there is little motivation to cram everything here into the controller process's
%%% code.
%%% @end

-module(hz_key_master).
-vsn("0.7.0").


-export([make_key/1, encode/1, decode/1]).
-export([lcg/1]).

make_key(<<>>) ->
    Pair = #{public := Public} = ecu_eddsa:sign_keypair(),
    ID = gmser_api_encoder:encode(account_pubkey, Public),
    {ID, Pair};
make_key(Seed) ->
    Pair = #{public := Public} = ecu_eddsa:sign_seed_keypair(Seed),
    ID = gmser_api_encoder:encode(account_pubkey, Public),
    {ID, Pair}.

-spec encode(Secret) -> Phrase
        when Secret :: binary(),
             Phrase :: string().
%% @doc
%% The encoding and decoding procedures are written to be able to handle any
%% width of bitstring or binary and a variable size dictionary. The magic numbers
%% 32, 4096 and 12 have been dropped in because currently these are known, but that
%% will change in the future if the key size or type changes.

encode(Bin) ->
    <<Number:(32 * 8)>> = Bin,
    DictSize = 4096,
    Words = read_words(),
    % Width = chunksize(DictSize - 1, 2),
    Width = 12,
    Chunks = chunksize(Number, DictSize),
    Binary = <<Number:(Chunks * Width)>>,
    encode(Width, Binary, Words).

encode(Width, Bits, Words) ->
    CheckSum = checksum(Width, Bits),
    encode(Width, <<CheckSum:Width, Bits/bitstring>>, Words, []).

encode(_, <<>>, _, Acc) ->
    unicode:characters_to_list(lists:join(" ", lists:reverse(Acc)));
encode(Width, Bits, Words, Acc) ->
    <<I:Width, Rest/bitstring>> = Bits,
    Word = lists:nth(I + 1, Words),
    encode(Width, Rest, Words, [Word | Acc]).


-spec decode(Phrase) -> {ok, Secret} | {error, Reason}
        when Phrase :: string(),
             Secret :: binary(),
             Reason :: bad_phrase | bad_word.
%% @doc
%% Reverses the encoded secret string back into its binary representation.

decode(Encoded) ->
    DictSize = 4096,
    Words = read_words(),
    Width = chunksize(DictSize - 1, 2),
    decode(Width, Words, Encoded).

decode(Width, Words, Encoded) when is_list(Encoded) ->
    decode(Width, Words, list_to_binary(Encoded));
decode(Width, Words, Encoded) ->
    Split = string:lexemes(Encoded, " "),
    decode(Width, Words, Split, <<>>).

decode(Width, Words, [Word | Rest], Acc) ->
    case find(Word, Words) of
        {ok, N} -> decode(Width, Words, Rest, <<Acc/bitstring, N:Width>>);
        Error -> Error
    end;
decode(Width, _, [], Acc) ->
    sumcheck(Width, Acc).

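A hedged arithmetic note (not part of the diff) on the magic numbers described above: a 32-byte secret is 256 bits and a 4096-word dictionary carries 12 bits per word, so the number is typically split into 22 chunks of 12 bits, and the prepended checksum word brings the phrase to 23 words.

    Width = 12,                            % chunksize(4096 - 1, 2)
    Chunks = (256 + Width - 1) div Width,  % 22 for a full 256-bit number
    PhraseWords = Chunks + 1.              % one extra checksum word = 23
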
chunksize(N, C) ->
    chunksize(N, C, 0).

chunksize(0, _, A) -> A;
chunksize(N, C, A) -> chunksize(N div C, C, A + 1).


read_words() ->
    ModPath = code:which(?MODULE),
    Path = filename:join([filename:dirname(filename:dirname(ModPath)), "priv", "words4096.txt"]),
    {ok, Bin} = file:read_file(Path),
    string:lexemes(Bin, "\n").


find(Word, Words) ->
    find(Word, Words, 0).

find(Word, [Word | _], N) -> {ok, N};
find(Word, [_ | Rest], N) -> find(Word, Rest, N + 1);
find(Word, [], _) -> {error, {bad_word, Word}}.


checksum(Width, Bits) ->
    checksum(Width, Bits, 0).

checksum(_, <<>>, Sum) ->
    Sum;
checksum(Width, Bits, Sum) ->
    <<N:Width, Rest/bitstring>> = Bits,
    checksum(Width, Rest, N bxor Sum).


sumcheck(Width, Bits) ->
    <<CheckSum:Width, Binary/bitstring>> = Bits,
    case checksum(Width, Binary) =:= CheckSum of
        true ->
            <<N:(bit_size(Binary))>> = Binary,
            {ok, <<N:(32 * 8)>>};
        false ->
            {error, bad_phrase}
    end.


-spec lcg(integer()) -> integer().
%% A simple PRNG that fits into 32 bits and is easy to implement anywhere (Kotlin).
%% Specifically, it is a "linear congruential generator" of the Lehmer variety.
%% The constants used are based on recommendations from Park, Miller and Stockmeyer:
%% https://www.firstpr.com.au/dsp/rand31/p105-crawford.pdf#page=4
%%
%% The input value should be between 1 and 2^31-1.
%%
%% The purpose of this PRNG is for password-based dictionary shuffling.

lcg(N) ->
    M = 16#7FFFFFFF,
    A = 48271,
    Q = 44488, % M div A
    R = 3399,  % M rem A
    Div = N div Q,
    Rem = N rem Q,
    S = Rem * A,
    T = Div * R,
    Result = S - T,
    case Result < 0 of
        false -> Result;
        true -> Result + M
    end.

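A hedged illustration (not part of the diff) of how the Lehmer generator above is threaded: each output becomes the next seed, which is what a password-derived dictionary shuffle needs. The first step from seed 1 is 1 * 48271 rem (2^31 - 1) = 48271; Schrage's trick with Q and R keeps the intermediate products inside 32 bits.

    Seed0 = 1,
    Seed1 = hz_key_master:lcg(Seed0),   % 48271
    Seed2 = hz_key_master:lcg(Seed1),   % next state in the sequence
    true = Seed1 =:= 48271.
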
@@ -9,7 +9,7 @@
%%% @end

-module(hz_man).
-vsn("0.7.0").
-vsn("0.5.1").
-behavior(gen_server).
-author("Craig Everett <ceverett@tsuriai.jp>").
-copyright("Craig Everett <ceverett@tsuriai.jp>").
@@ -21,7 +21,7 @@
         timeout/0, timeout/1]).

%% The whole point of this module:
-export([request_sticky/1, request_sticky/2, request/1, request/2]).
-export([request/1, request/2]).

%% gen_server goo
-export([start_link/0]).
@@ -29,6 +29,7 @@
         code_change/3, terminate/2]).



%%% Type and Record Definitions

-record(fetcher,
@@ -94,25 +95,6 @@ timeout(Value) when 0 < Value, Value =< 120000 ->
    gen_server:cast(?MODULE, {timeout, Value}).


-spec request_sticky(Path) -> {ok, Value} | {error, Reason}
        when Path :: unicode:charlist(),
             Value :: map(),
             Reason :: hz:chain_error().

request_sticky(Path) ->
    gen_server:call(?MODULE, {request_sticky, {get, Path}}, infinity).


-spec request_sticky(Path, Data) -> {ok, Value} | {error, Reason}
        when Path :: unicode:charlist(),
             Data :: unicode:charlist(),
             Value :: map(),
             Reason :: hz:chain_error().

request_sticky(Path, Data) ->
    gen_server:call(?MODULE, {request_sticky, {post, Path, Data}}, infinity).


-spec request(Path) -> {ok, Value} | {error, Reason}
        when Path :: unicode:charlist(),
             Value :: map(),

@@ -164,13 +146,10 @@ init(none) ->
handle_call({request, Request}, From, State) ->
    NewState = do_request(Request, From, State),
    {noreply, NewState};
handle_call({request_sticky, Request}, From, State) ->
    NewState = do_request_sticky(Request, From, State),
    {noreply, NewState};
handle_call(tls, _, State = #s{tls = TLS}) ->
    {reply, TLS, State};
handle_call(chain_nodes, _, State) ->
    Nodes = do_chain_nodes(State),
handle_call(chain_nodes, _, State = #s{chain_nodes = {Wait, Used}}) ->
    Nodes = lists:append(Wait, Used),
    {reply, Nodes, State};
handle_call(timeout, _, State = #s{timeout = Value}) ->
    {reply, Value, State};
@@ -182,9 +161,10 @@ handle_call(Unexpected, From, State) ->
handle_cast({tls, Boolean}, State) ->
    NewState = do_tls(Boolean, State),
    {noreply, NewState};
handle_cast({chain_nodes, List}, State) ->
    NewState = do_chain_nodes(List, State),
    {noreply, NewState};
handle_cast({chain_nodes, []}, State) ->
    {noreply, State#s{chain_nodes = {[], []}}};
handle_cast({chain_nodes, ToUse}, State) ->
    {noreply, State#s{chain_nodes = {ToUse, []}}};
handle_cast({timeout, Value}, State) ->
    {noreply, State#s{timeout = Value}};
handle_cast(Unexpected, State) ->
@@ -239,23 +219,6 @@ terminate(_, _) ->

%%% Doer Functions

do_chain_nodes(#s{sticky = none, chain_nodes = {Wait, Used}}) ->
    lists:append(Wait, Used);
do_chain_nodes(#s{sticky = Sticky, chain_nodes = {Wait, Used}}) ->
    case lists:append(Wait, Used) of
        [Sticky] -> [Sticky];
        Nodes -> [Sticky | Nodes]
    end.


do_chain_nodes([], State) ->
    State#s{sticky = none, chain_nodes = {[], []}};
do_chain_nodes(List = [Sticky], State) ->
    State#s{sticky = Sticky, chain_nodes = {List, []}};
do_chain_nodes([Sticky | List], State) ->
    State#s{sticky = Sticky, chain_nodes = {List, []}}.


do_tls(true, State) ->
    ok = ssl:start(),
    State#s{tls = true};
@@ -265,21 +228,17 @@ do_tls(_, State) ->
    State.

do_request_sticky(_, From, State = #s{sticky = none}) ->
do_request(_, From, State = #s{chain_nodes = {[], []}}) ->
    ok = gen_server:reply(From, {error, no_nodes}),
    State;
do_request_sticky(Request,
do_request(Request,
           From,
           State = #s{tls = TLS,
           State = #s{tls = false,
                      fetchers = Fetchers,
                      sticky = Node,
                      chain_nodes = {[Node | Rest], Used},
                      timeout = Timeout}) ->
    Now = erlang:system_time(nanosecond),
    Fetcher =
        case TLS of
            true -> fun() -> hz_fetcher:connect_slowly(Node, Request, From, Timeout) end;
            false -> fun() -> hz_fetcher:connect(Node, Request, From, Timeout) end
        end,
    Fetcher = fun() -> hz_fetcher:connect(Node, Request, From, Timeout) end,
    {PID, Mon} = spawn_monitor(Fetcher),
    New = #fetcher{pid = PID,
                   mon = Mon,
@@ -287,24 +246,15 @@ do_request_sticky(Request,
                   node = Node,
                   from = From,
                   req = Request},
    State#s{fetchers = [New | Fetchers]}.


do_request(_, From, State = #s{chain_nodes = {[], []}}) ->
    ok = gen_server:reply(From, {error, no_nodes}),
    State;
    State#s{fetchers = [New | Fetchers], chain_nodes = {Rest, [Node | Used]}};
do_request(Request,
           From,
           State = #s{tls = TLS,
           State = #s{tls = true,
                      fetchers = Fetchers,
                      chain_nodes = {[Node | Rest], Used},
                      timeout = Timeout}) ->
    Now = erlang:system_time(nanosecond),
    Fetcher =
        case TLS of
            true -> fun() -> hz_fetcher:connect_slowly(Node, Request, From, Timeout) end;
            false -> fun() -> hz_fetcher:connect(Node, Request, From, Timeout) end
        end,
    Fetcher = fun() -> hz_fetcher:slowly_connect(Node, Request, From, Timeout) end,
    {PID, Mon} = spawn_monitor(Fetcher),
    New = #fetcher{pid = PID,
                   mon = Mon,
@@ -318,6 +268,8 @@ do_request(Request, From, State = #s{chain_nodes = {[], Used}}) ->
    do_request(Request, From, State#s{chain_nodes = {Fresh, []}}).


log(Level, Format, Args) ->
    Raw = io_lib:format("~w ~w: " ++ Format, [?MODULE, self() | Args]),
    Entry = unicode:characters_to_list(Raw),

@@ -9,7 +9,7 @@
%%% @end

-module(hz_sup).
-vsn("0.7.0").
-vsn("0.5.1").
-behaviour(supervisor).
-author("Craig Everett <zxq9@zxq9.com>").
-copyright("Craig Everett <zxq9@zxq9.com>").

@@ -4,10 +4,10 @@
{prefix,"hz"}.
{desc,"Gajumaru interoperation library"}.
{author,"Craig Everett"}.
{package_id,{"otpr","hakuzaru",{0,7,0}}}.
{deps,[{"otpr","sophia",{9,0,0}},
       {"otpr","gmserialization",{0,1,3}},
{package_id,{"otpr","hakuzaru",{0,5,1}}}.
{deps,[{"otpr","sophia",{8,0,1}},
       {"otpr","gmbytecode",{3,4,1}},
       {"otpr","gmserialization",{0,1,2}},
       {"otpr","base58",{0,1,1}},
       {"otpr","eblake2",{1,0,1}},
       {"otpr","ec_utils",{1,0,0}},