author     Russell Branca <chewbranca@apache.org>  2018-03-12 22:05:44 +0000
committer  Russell Branca <chewbranca@apache.org>  2018-03-12 22:26:37 +0000
commit     09c58ad5b7176888d5ac24e3d5fe222648505455 (patch)
tree       ba1644bd431fbfe0843da73886e35ebf8e646fc2
parent     3b53c1c92af6dc774995fd8f1009d1306248477c (diff)
download   couchdb-experiment-transient-stats.tar.gz

EXPERIMENT: enable couch_file transient stats (archive/experiment-transient-stats, experiment-transient-stats)
-rw-r--r--  src/chttpd/src/chttpd_misc.erl                  | 10
-rw-r--r--  src/couch/src/couch_file.erl                    | 85
-rw-r--r--  src/couch_stats/src/couch_stats.erl             |  4
-rw-r--r--  src/couch_stats/src/couch_stats_aggregator.erl  | 23
4 files changed, 113 insertions(+), 9 deletions(-)
diff --git a/src/chttpd/src/chttpd_misc.erl b/src/chttpd/src/chttpd_misc.erl
index 253da233e..0f0072edb 100644
--- a/src/chttpd/src/chttpd_misc.erl
+++ b/src/chttpd/src/chttpd_misc.erl
@@ -332,6 +332,16 @@ handle_node_req(#httpd{method='GET', path_parts=[_, Node, <<"_stats">> | Path]}=
chttpd:send_json(Req, EJSON1);
handle_node_req(#httpd{path_parts=[_, _Node, <<"_stats">>]}=Req) ->
send_method_not_allowed(Req, "GET");
+handle_node_req(#httpd{method='GET', path_parts=[_, Node, <<"_transient_stats">> | Path]}=Req) ->
+ flush(Node, Req),
+ Stats0 = call_node(Node, couch_stats, fetch_transient, []),
+ Stats = couch_stats_httpd:transform_stats(Stats0),
+ Nested = couch_stats_httpd:nest(Stats),
+ EJSON0 = couch_stats_httpd:to_ejson(Nested),
+ EJSON1 = couch_stats_httpd:extract_path(Path, EJSON0),
+ chttpd:send_json(Req, EJSON1);
+handle_node_req(#httpd{path_parts=[_, _Node, <<"_transient_stats">>]}=Req) ->
+ send_method_not_allowed(Req, "GET");
% GET /_node/$node/_system
handle_node_req(#httpd{method='GET', path_parts=[_, Node, <<"_system">>]}=Req) ->
Stats = call_node(Node, chttpd_misc, get_stats, []),
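
The new _transient_stats clauses mirror the existing _stats handler directly above; only the fetch call differs. As a rough sketch (not part of the commit), the same pipeline can be run from a shell on the target node, assuming some couch_file processes are open:

    Stats0 = couch_stats:fetch_transient(),
    Stats  = couch_stats_httpd:transform_stats(Stats0),
    Nested = couch_stats_httpd:nest(Stats),
    EJSON  = couch_stats_httpd:to_ejson(Nested).

Over HTTP this surfaces as GET /_node/<nodename>/_transient_stats, with the per-file metrics nested under transient -> couch_file -> <file path>.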
diff --git a/src/couch/src/couch_file.erl b/src/couch/src/couch_file.erl
index acd4fda78..3e36285b2 100644
--- a/src/couch/src/couch_file.erl
+++ b/src/couch/src/couch_file.erl
@@ -129,7 +129,7 @@ append_term_md5(Fd, Term, Options) ->
append_binary(Fd, Bin) ->
ioq:call(Fd, {append_bin, assemble_file_chunk(Bin)}, erlang:get(io_priority)).
-
+
append_binary_md5(Fd, Bin) ->
ioq:call(Fd,
{append_bin, assemble_file_chunk(Bin, crypto:hash(md5, Bin))},
@@ -358,7 +358,7 @@ init({Filepath, Options, ReturnPid, Ref}) ->
Limit = get_pread_limit(),
IsSys = lists:member(sys_db, Options),
update_read_timestamp(),
- case lists:member(create, Options) of
+ InitStatus = case lists:member(create, Options) of
true ->
filelib:ensure_dir(Filepath),
case file:open(Filepath, OpenOptions) of
@@ -406,7 +406,12 @@ init({Filepath, Options, ReturnPid, Ref}) ->
Error ->
init_status_error(ReturnPid, Ref, Error)
end
- end.
+ end,
+ case InitStatus of
+ {ok, _} -> ok = create_couch_file_metrics(Filepath);
+ _ -> ok
+ end,
+ InitStatus.
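
The only change above is binding the result of the existing case expression so that metric registration happens only after a successful open; InitStatus is then returned unchanged. Stripped of the couch_file details, the shape is (do_open/0 and register_metrics/0 are placeholders, not functions in this module):

    InitStatus = case do_open() of
        {ok, Fd} -> {ok, Fd};
        Error -> Error
    end,
    case InitStatus of
        {ok, _} -> ok = register_metrics();
        _ -> ok
    end,
    InitStatus.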
file_open_options(Options) ->
[read, raw, binary] ++ case lists:member(read_only, Options) of
@@ -427,6 +432,19 @@ maybe_track_open_os_files(Options) ->
terminate(_Reason, #file{fd = nil}) ->
ok;
terminate(_Reason, #file{fd = Fd}) ->
+ Filepath = erlang:get(metrics_name),
+ Metrics = [
+ {counter, ["reads", "count"]},
+ {counter, ["reads", "bytes"]},
+ {histogram, ["reads", "latency"]},
+ {counter, ["writes", "count"]},
+ {counter, ["writes", "bytes"]},
+ {histogram, ["writes", "latency"]}
+ ],
+ lists:foreach(fun({_, Name0}) ->
+ Name = ["transient", "couch_file", Filepath | Name0],
+ couch_stats:delete(Name)
+ end, Metrics),
ok = file:close(Fd).
handle_call(Msg, From, File) when ?IS_OLD_STATE(File) ->
@@ -437,8 +455,9 @@ handle_call(close, _From, #file{fd=Fd}=File) ->
handle_call({pread_iolist, Pos}, _From, File) ->
update_read_timestamp(),
+ T0 = os:timestamp(),
{LenIolist, NextPos} = read_raw_iolist_int(File, Pos, 4),
- case iolist_to_binary(LenIolist) of
+ Res = case iolist_to_binary(LenIolist) of
<<1:1/integer,Len:31/integer>> -> % an MD5-prefixed term
{Md5AndIoList, _} = read_raw_iolist_int(File, NextPos, Len+16),
{Md5, IoList} = extract_md5(Md5AndIoList),
@@ -446,7 +465,11 @@ handle_call({pread_iolist, Pos}, _From, File) ->
<<0:1/integer,Len:31/integer>> ->
{Iolist, _} = read_raw_iolist_int(File, NextPos, Len),
{reply, {ok, Iolist, <<>>}, File}
- end;
+ end,
+ T1 = os:timestamp(),
+ Delta = timer:now_diff(T1, T0) div 1000,
+ ok = update_read_metrics(iolist_size(LenIolist), Delta),
+ Res;
handle_call(bytes, _From, #file{fd = Fd} = File) ->
{reply, file:position(Fd, eof), File};
@@ -472,14 +495,19 @@ handle_call({truncate, Pos}, _From, #file{fd=Fd}=File) ->
end;
handle_call({append_bin, Bin}, _From, #file{fd = Fd, eof = Pos} = File) ->
+ T0 = os:timestamp(),
Blocks = make_blocks(Pos rem ?SIZE_BLOCK, Bin),
Size = iolist_size(Blocks),
- case file:write(Fd, Blocks) of
+ Res = case file:write(Fd, Blocks) of
ok ->
{reply, {ok, Pos, Size}, File#file{eof = Pos + Size}};
Error ->
{reply, Error, reset_eof(File)}
- end;
+ end,
+ T1 = os:timestamp(),
+ Delta = timer:now_diff(T1, T0) div 1000,
+ ok = update_write_metrics(Size, Delta),
+ Res;
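
Both the read path above (pread_iolist) and this write path time the call the same way: timer:now_diff/2 returns microseconds, so the div 1000 hands the latency histogram milliseconds. The skeleton, in isolation:

    T0 = os:timestamp(),
    %% ... perform the read or write ...
    DeltaMs = timer:now_diff(os:timestamp(), T0) div 1000.

One apparent asymmetry: the write side counts the full block size (Size), while the read side passes iolist_size(LenIolist), which is just the 4-byte length prefix, so the reads/bytes counter appears to under-report the payload actually read.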
handle_call({write_header, Bin}, _From, #file{fd = Fd, eof = Pos} = File) ->
BinSize = byte_size(Bin),
@@ -501,6 +529,19 @@ handle_call(find_header, _From, #file{fd = Fd, eof = Pos} = File) ->
{reply, find_header(Fd, Pos div ?SIZE_BLOCK), File}.
handle_cast(close, Fd) ->
+ Filepath = erlang:get(metrics_name),
+ Metrics = [
+ {counter, ["reads", "count"]},
+ {counter, ["reads", "bytes"]},
+ {histogram, ["reads", "latency"]},
+ {counter, ["writes", "count"]},
+ {counter, ["writes", "bytes"]},
+ {histogram, ["writes", "latency"]}
+ ],
+ lists:foreach(fun({_, Name0}) ->
+ Name = ["transient", "couch_file", Filepath | Name0],
+ couch_stats:delete(Name)
+ end, Metrics),
{stop,normal,Fd}.
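
This cleanup block duplicates the one added to terminate/2: both enumerate the six metric names and delete them so closed files do not leave stale entries behind. A possible follow-up, sketched here and not part of this commit, would be a shared helper alongside create_couch_file_metrics/1:

    delete_couch_file_metrics() ->
        Filepath = erlang:get(metrics_name),
        lists:foreach(fun(Name0) ->
            couch_stats:delete(["transient", "couch_file", Filepath | Name0])
        end, [["reads", "count"], ["reads", "bytes"], ["reads", "latency"],
              ["writes", "count"], ["writes", "bytes"], ["writes", "latency"]]).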
code_change(_OldVsn, State, _Extra) ->
@@ -733,6 +774,36 @@ reset_eof(#file{} = File) ->
{ok, Eof} = file:position(File#file.fd, eof),
File#file{eof = Eof}.
+create_couch_file_metrics(Filepath0) ->
+ %% Drop .couch and .suffix
+ Filepath = filename:rootname(filename:rootname(Filepath0)),
+ erlang:put(metrics_name, Filepath),
+ Metrics = [
+ {counter, ["reads", "count"]},
+ {counter, ["reads", "bytes"]},
+ {histogram, ["reads", "latency"]},
+ {counter, ["writes", "count"]},
+ {counter, ["writes", "bytes"]},
+ {histogram, ["writes", "latency"]}
+ ],
+ lists:foreach(fun({Type, Name0}) ->
+ Name = ["transient", "couch_file", Filepath | Name0],
+ ok = couch_stats:new(Type, Name)
+ end, Metrics).
+
+update_read_metrics(Bytes, Delta) ->
+ update_metrics("reads", Bytes, Delta).
+
+update_write_metrics(Bytes, Delta) ->
+ update_metrics("writes", Bytes, Delta).
+
+update_metrics(Type, Bytes, Delta) ->
+ Name = erlang:get(metrics_name),
+ catch couch_stats:increment_counter(["transient", "couch_file", Name, Type, "count"]),
+ catch couch_stats:increment_counter(["transient", "couch_file", Name, Type, "bytes"], Bytes),
+ catch couch_stats:update_histogram(["transient", "couch_file", Name, Type, "latency"], Delta),
+ ok.
+
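
With these helpers, every open couch_file stores its cleaned-up path (extension and timestamp suffix dropped) in the process dictionary under metrics_name and registers six metrics under ["transient", "couch_file", Path | ...]. Any one of them can be read back through the existing couch_stats:sample/1; the shard path below is only illustrative:

    Path = "shards/00000000-ffffffff/mydb",
    couch_stats:sample(["transient", "couch_file", Path, "reads", "latency"]).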
-ifdef(TEST).
-include_lib("couch/include/couch_eunit.hrl").
diff --git a/src/couch_stats/src/couch_stats.erl b/src/couch_stats/src/couch_stats.erl
index 4fde14acb..fb5f07a8a 100644
--- a/src/couch_stats/src/couch_stats.erl
+++ b/src/couch_stats/src/couch_stats.erl
@@ -16,6 +16,7 @@
start/0,
stop/0,
fetch/0,
+ fetch_transient/0,
reload/0,
sample/1,
new/2,
@@ -45,6 +46,9 @@ stop() ->
fetch() ->
couch_stats_aggregator:fetch().
+fetch_transient() ->
+ couch_stats_aggregator:fetch_transient().
+
reload() ->
couch_stats_aggregator:reload().
diff --git a/src/couch_stats/src/couch_stats_aggregator.erl b/src/couch_stats/src/couch_stats_aggregator.erl
index 17bd6fc33..163069aa8 100644
--- a/src/couch_stats/src/couch_stats_aggregator.erl
+++ b/src/couch_stats/src/couch_stats_aggregator.erl
@@ -16,6 +16,7 @@
-export([
fetch/0,
+ fetch_transient/0,
flush/0,
reload/0
]).
@@ -36,6 +37,7 @@
-record(st, {
descriptions,
stats,
+ tstats,
collect_timer,
reload_timer
}).
@@ -44,6 +46,10 @@ fetch() ->
{ok, Stats} = gen_server:call(?MODULE, fetch),
Stats.
+fetch_transient() ->
+ {ok, Stats} = gen_server:call(?MODULE, fetch_transient),
+ Stats.
+
flush() ->
gen_server:call(?MODULE, flush).
@@ -58,10 +64,12 @@ init([]) ->
Interval = config:get_integer("stats", "interval", ?DEFAULT_INTERVAL),
{ok, CT} = timer:send_interval(Interval * 1000, self(), collect),
{ok, RT} = timer:send_interval(?RELOAD_INTERVAL * 1000, self(), reload),
- {ok, #st{descriptions=Descs, stats=[], collect_timer=CT, reload_timer=RT}}.
+ {ok, #st{descriptions=Descs, stats=[], tstats=[], collect_timer=CT, reload_timer=RT}}.
handle_call(fetch, _from, #st{stats = Stats}=State) ->
{reply, {ok, Stats}, State};
+handle_call(fetch_transient, _from, #st{tstats = TStats}=State) ->
+ {reply, {ok, TStats}, State};
handle_call(flush, _From, State) ->
{reply, ok, collect(State)};
handle_call(reload, _from, State) ->
@@ -147,4 +155,15 @@ collect(State) ->
end,
State#st.descriptions
),
- State#st{stats=Stats}.
+ TStats = lists:foldl(
+ fun
+ ({["transient" | _] = Name, Props0}, Acc) ->
+ Props = proplists:delete(tags, Props0),
+ [{Name, [{value, couch_stats:sample(Name)}|Props]} | Acc];
+ (_, Acc) ->
+ Acc
+ end,
+ [],
+ folsom_metrics:get_metrics_info()
+ ),
+ State#st{stats=Stats, tstats=TStats}.
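
The transient metrics have no entries in the descriptions list, so collect/1 discovers them from folsom directly: folsom_metrics:get_metrics_info/0 lists every registered metric as {Name, Props}, and the fold keeps only names beginning with "transient" (i.e. the ones couch_file registers), samples each on the spot, and drops the tags property. One resulting entry would look roughly like this (path and value are illustrative):

    {["transient", "couch_file", "shards/00000000-ffffffff/mydb", "reads", "count"],
     [{value, 42}, {type, counter}]}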