Commit 3d5ce811 authored by Heinz N. Gies

Merge

parents 702abade f9148580
Pipeline #12949078 passed with stage
in 1 minute and 14 seconds
......@@ -18,3 +18,4 @@ apps/esyslog/src/esyslog_message_lexer.erl
apps/esyslog/src/esyslog_message_parser.erl
*~
rel/deb/*.deb
compile_commands.json
### Generic cli file for services
stages:
- test # run tests
- package # verify package creation
################################################################################
## helpers
################################################################################
.not_triggered: &not_triggered
except:
- pipelines
.is_triggered: &is_triggered
only:
- pipelines
################################################################################
## test stage
################################################################################
#trigger_dialyzer:
# <<: *is_triggered
# image: erlang:19
# stage: test
# script:
# - ./rebar3 upgrade $DOWNSTREAM_LIBRARY
# - ./rebar3 dialyzer
# tags:
# - gitlab-org-high-cpu
trigger_xref:
<<: *is_triggered
image: erlang:19
stage: test
script:
- ./rebar3 upgrade $DOWNSTREAM_LIBRARY
- ./rebar3 xref
tags:
- gitlab-org-high-cpu
################################################################################
## package stage
################################################################################
# We make sure our package builds on both FreeBSD and SmartOS
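# Non-master builds get a "preYYYYMMDDHHMMSS" timestamp appended as SUFFIX, so
# every pipeline publishes a uniquely versioned pre-release package; master
# builds keep the bare version and are uploaded to the "rel" directory instead
# of "dev".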
smartos:
<<: *not_triggered
stage: package
script:
- export SUFFIX=$(if [ "$CI_BUILD_REF_NAME" = "master" ]; then echo ""; else /opt/local/bin/erl -noshell -eval '{{Y, MM, D}, {H, M, S}} = calendar:universal_time(), io:format("pre~4.10.0B~2.10.0B~2.10.0B~2.10.0B~2.10.0B~2.10.0B", [Y, MM, D, H, M, S]),init:stop()'; fi);
- export DS_VER=$(echo $CI_RUNNER_TAGS | sed 's/.*base64-\([0-9.]*\).*/\1/')
- export BRANCH=$(if [ "$CI_BUILD_REF_NAME" = "master" ]; then echo rel; else echo dev; fi)
- gmake dist
- mkdir -p rel/pkg/info
- aws s3 cp rel/pkg/*.tgz s3://release.project-fifo.net/pkg/$DS_VER/$BRANCH/
- pkg_info -X rel/pkg/*.tgz > rel/pkg/info/$(pkg_info -X rel/pkg/*.tgz | awk -F "=" '/FILE_NAME/ {print $2}')
- aws s3 cp rel/pkg/info/* s3://release-info.project-fifo.net/pkg/$DS_VER/$BRANCH/
- 'curl --request POST --form "token=$CI_JOB_TOKEN" --form "ref=master" --form "variables[DS_VER]=$DS_VER" --form "variables[BRANCH]=$BRANCH" https://gitlab.com/api/v4/projects/$REPO_PID/trigger/pipeline'
tags:
- erlang-19
- smartos
only:
refs:
- master
- dev
- /^dev-.*$/
smartos_test:
stage: test
script:
- ./rebar3 as lint lint
- ./rebar3 xref
#- ./rebar3 dialyzer
tags:
- erlang-19
- smartos
except:
- pipelines
- master
- dev
- /^dev-.*$/
################################################################################
## bsd stage
################################################################################
bsd:
<<: *not_triggered
stage: package
script:
- export SUFFIX=$(if [ "$CI_BUILD_REF_NAME" = "master" ]; then echo ""; else /usr/local/bin/erl -noshell -eval '{{Y, MM, D}, {H, M, S}} = calendar:universal_time(), io:format("pre~4.10.0B~2.10.0B~2.10.0B~2.10.0B~2.10.0B~2.10.0B", [Y, MM, D, H, M, S]),init:stop()'; fi);
- export OS_VER=$(echo $CI_RUNNER_TAGS | sed 's/.*freebsd-\([0-9.]*\).*/\1/')
- export ARCH=$(uname -m)
- export BRANCH=$(if [ "$CI_BUILD_REF_NAME" = "master" ]; then echo rel; else echo dev; fi)
- gmake dist
- aws s3 cp rel/pkgng/*.txz s3://freebsd.project-fifo.net/$BRANCH/$ARCH/$OS_VER/
- 'curl --request POST --form "token=$CI_JOB_TOKEN" --form "ref=master" --form "variables[ARCH]=$ARCH" --form "variables[OS_VER]=$OS_VER" --form "variables[BRANCH]=$BRANCH" https://gitlab.com/api/v4/projects/$PKGNG_REPO_PID/trigger/pipeline'
tags:
- erlang-19
- freebsd-11.0
only:
refs:
- master
- dev
- /^dev-.*$/
......@@ -25,4 +25,7 @@ deb-package: deb-prepare
package: rel
$(MAKE) -C rel/pkg package
version_header:
true
FORCE:
......@@ -18,13 +18,13 @@ Requirements: As per the diagram you will need both DalmatinerDB and Postgres ru
# Metric Listener Configuration
Settings are configured in dalmatinerpx.conf
Settings are configured in dpx.conf
## DQE Indexer backend
First, configure the dqe indexer module you intend to use.
For example, for the Postgres Indexer, configure your dalmatinerpx.conf as follows:
For example, for the Postgres Indexer, configure your dpx.conf as follows:
```
idx.backend = dqe_idx_pg
```
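Listener settings in dpx.conf use the same flat `key = value` style. As a point of reference, the OpenTSDB listener port shown further down in this file looks like this (a minimal excerpt, not a complete listener configuration):
```
listeners.dp_otsdb.port = 4242
```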
......@@ -120,7 +120,7 @@ listeners.dp_otsdb.port = 4242
# Log Listener Configuration
Settings are configured in dalmatinerpx.conf
Settings are configured in dpx.conf
## Syslog
......
{application, ddb_proxy,
[{description, "Dalmatiner proxy"},
{vsn, "0.3.2p6"},
{vsn, "0.3.3"},
{registered, []},
{mod, { ddb_proxy_app, []}},
{applications,
......
......@@ -6,7 +6,7 @@
%%% @end
%%% Created : 7 Jun 2016 by Heinz Nikolaus Gies <heinz@licenser.net>
%%%-------------------------------------------------------------------
-module(dqe_proxy_prom_scraper).
-module(ddb_proxy_prom_scraper).
-behaviour(gen_server).
......@@ -24,6 +24,9 @@
-record(state, {url :: string(),
freq :: pos_integer(),
bucket :: binary(),
%% this looks silly but the prom data
%% already arrives in 1s resolution
res = 1000 div 1000 :: pos_integer(),
ddb}).
%%%===================================================================
......@@ -61,9 +64,11 @@ init([Name, Bucket, URL, Freq]) ->
erlang:send_after(Freq, self(), scrape),
{Host, Port} = dp_util:ddb_config(),
C = dp_util:ddb_c(ddb_tcp:connect(Host,Port)),
C1 = dp_util:ddb_c(ddb_tcp:stream_mode(Bucket, 5, C)),
{ok, #state{bucket = Bucket, url = URL, freq = Freq, ddb = C1}}.
{ok, #{resolution := Res}, C1} = ddb_tcp:bucket_info(Bucket, C),
%% Prom data already arrives in 1s resolution.
Res1 = Res div 1000,
C2 = dp_util:ddb_c(ddb_tcp:stream_mode(Bucket, 5, C1)),
{ok, #state{bucket = Bucket, url = URL, freq = Freq, ddb = C2, res = Res1}}.
%%--------------------------------------------------------------------
%% @private
......@@ -155,9 +160,9 @@ code_change(_OldVsn, State, _Extra) ->
%%% Internal functions
%%%===================================================================
do_send(Decoded = #{time := Time, key := Key, value := Value},
State = #state{bucket = Bucket, ddb = C}) ->
State = #state{bucket = Bucket, ddb = C, res = R}) ->
KeyBin = dproto:metric_from_list(Key),
Points = mmath_bin:from_list([Value]),
C1 = dp_util:ddb_c(ddb_tcp:send(KeyBin, Time, Points, C)),
dp_index:add(Bucket, Decoded),
C1 = dp_util:ddb_c(ddb_tcp:send(KeyBin, Time div R, Points, C)),
dp_index:add(Bucket, Decoded, Time),
State#state{ddb = C1}.
......@@ -16,5 +16,5 @@ decode_metric(Metric, State = #{bucket := Bucket, ddb := C,
KeyBin = dproto:metric_from_list(Key),
Points = mmath_bin:from_list([Value]),
C1 = dp_util:ddb_c(ddb_tcp:send(KeyBin, Time div Res, Points, C)),
dp_index:add(Bucket, Metric),
dp_index:add(Bucket, Metric, Time),
State#{ddb => C1}.
......@@ -11,7 +11,7 @@
-behaviour(gen_server).
%% API
-export([start_link/0, add/2]).
-export([start_link/0, add/3]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
......@@ -21,7 +21,10 @@
-define(SERVER, ?MODULE).
-record(state, {seen = btrie:new()}).
-record(state, {
seen = btrie:new(),
last_seen_update = 10*60 % 10m
}).
%%%===================================================================
%%% API
......@@ -33,16 +36,16 @@
%%
%% @spec start_link() -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------x
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
add(Bucket, Metric) ->
add(Bucket, Metric, Time) ->
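    %% Back-pressure guard: if the index server's mailbox is already long,
    %% use a synchronous call so producers block until it drains; otherwise
    %% send an asynchronous cast.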
case erlang:process_info(whereis(?SERVER), message_queue_len) of
{message_queue_len, N} when N > 100 ->
gen_server:call(?SERVER, {tags, Bucket, Metric});
gen_server:call(?SERVER, {tags, Bucket, Metric, Time});
_ ->
gen_server:cast(?SERVER, {tags, Bucket, Metric})
gen_server:cast(?SERVER, {tags, Bucket, Metric, Time})
end.
%%%===================================================================
......@@ -77,8 +80,8 @@ init([]) ->
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_call({tags, Bucket, Metric}, _From, State) ->
State1 = do_add(Bucket, Metric, State),
handle_call({tags, Bucket, Metric, Time}, _From, State) ->
State1 = do_add(Bucket, Metric, Time, State),
{reply, ok, State1};
handle_call(_Request, _From, State) ->
Reply = ok,
......@@ -94,8 +97,8 @@ handle_call(_Request, _From, State) ->
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_cast({tags, Bucket, Metric}, State) ->
State1 = do_add(Bucket, Metric, State),
handle_cast({tags, Bucket, Metric, Time}, State) ->
State1 = do_add(Bucket, Metric, Time, State),
{noreply, State1};
handle_cast(_Msg, State) ->
{noreply, State}.
......@@ -143,15 +146,18 @@ code_change(_OldVsn, State, _Extra) ->
%%% Internal functions
%%%===================================================================
do_add(Bucket, Metric = #{key := Key}, State = #state{seen = Seen}) ->
do_add(Bucket, Metric = #{key := Key}, Time, State = #state{seen = Seen, last_seen_update = TTL}) ->
KeyBin = dproto:metric_from_list(Key),
K = <<Bucket/binary, 0, KeyBin/binary>>,
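    %% Three outcomes: seen recently (within TTL) -> nothing to do; seen but
    %% stale -> touch the index entry and remember the new timestamp; never
    %% seen -> create the index entry.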
case btrie:is_key(K, Seen) of
true ->
case btrie:find(K, Seen) of
{ok, Last} when Time - Last < TTL ->
State;
false ->
{ok, _Last} ->
dqe_idx:touch([{Bucket, Key, Time}]),
State#state{seen = btrie:store(K, Time, Seen)};
error ->
#{metric := MetricParts, tags := Tags} =
dp_util:expand_tags(Metric),
dqe_idx:add(Bucket, MetricParts, Bucket, Key, Tags),
State#state{seen = btrie:store(K, Seen)}
dqe_idx:add(Bucket, MetricParts, Bucket, Key, Time, Tags),
State#state{seen = btrie:store(K, Time, Seen)}
end.
......@@ -25,5 +25,5 @@ decode_metric(Metric, State = #{bucket := Bucket, ddb := C}) ->
KeyBin = dproto:metric_from_list(Key),
Points = mmath_bin:from_list([Value]),
C1 = dp_util:ddb_c(ddb_tcp:send(KeyBin, Time, Points, C)),
dp_index:add(Bucket, Metric),
dp_index:add(Bucket, Metric, Time),
State#{ddb => C1}.
......@@ -31,7 +31,7 @@ send_metrics([#{ time := T} | _ ] = Ms, State = #{ddb := C, res := R}) ->
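%% Metrics that land in the same resolution slot (Tin div R) are accumulated
%% into one batch; a metric from a different slot ends the current batch and
%% starts the next one, and an empty list flushes whatever is left.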
send_metrics([M = #{time := Tin, key := Key, value := Value} | Ms],
T, Acc, State = #{bucket := Bucket, res := R})
when Tin div R =:= T ->
dp_index:add(Bucket, M),
dp_index:add(Bucket, M, Tin),
KeyBin = dproto:metric_from_list(Key),
Points = mmath_bin:from_list([Value]),
send_metrics(Ms, T, [{KeyBin, Points} | Acc], State);
......@@ -43,7 +43,7 @@ send_metrics([M = #{time := T, key := Key, value := Value} | Ms],
C3 = dp_util:ddb_c(ddb_tcp:batch_start(T, C2)),
KeyBin = dproto:metric_from_list(Key),
Points = mmath_bin:from_list([Value]),
dp_index:add(Bucket, M),
dp_index:add(Bucket, M, T),
send_metrics(Ms, T div R, [{KeyBin, Points}], State#{ddb => C3});
send_metrics([], _, Acc, State = #{ddb := C}) ->
%%lager:info("Batch size: ~p", [length(Acc)]),
......
......@@ -17,7 +17,11 @@ protocol() ->
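%% Snappy framing-format streams begin with the stream identifier chunk
%% <<255,6,0,0,"sNaPpY">>; each following chunk is a one-byte type, a 24-bit
%% little-endian length and the payload. Anything without that header is
%% treated as a plain snappy block and handed to snappiest:decompress/1.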
unsnap(<<255,6,0,0, "sNaPpY", Data/binary>>) ->
<< << (unsnap_chunk(T, E))/binary >> ||
<<T, S:24/little-unsigned-integer, E:S/binary >> <= Data >>.
<<T, S:24/little-unsigned-integer, E:S/binary >> <= Data >>;
unsnap(Compressed) ->
{ok, C} = snappiest:decompress(Compressed),
C.
-define(SNAPPY_CHUNK, 16#00).
-define(UNCOMPRESSED_CHUNK, 16#01).
......@@ -43,8 +47,6 @@ unsnap_chunk(T, _) when T >= ?UNSKIPPLE_START,
T =< ?UNSKIPPLE_END ->
error(badarg).
decode(#'TimeSeries'{labels = Labels, samples = Samples}) ->
Tags = lists:sort([decode_label(Label) || Label <- Labels]),
Metric = get_name(Tags),
......
VERSION=0.3.3
COMPONENT_INTERNAL=dpx
COMPONENT=dalmatinerpx
......@@ -85,7 +85,7 @@
## Multiple urls can be specified as part of the same cluster,
## this means that only ONE of the urls will be written to each interval.
# urls = ["udp://localhost:8089"] # UDP endpoint example
urls = ["http://localhost:8087", "http://192.168.1.44:8086"] # required
urls = ["http://localhost:8087"] # required
## The target database for metrics (telegraf will create it if not exists).
database = "telegraf" # required
## Retention policy to write to.
......
REBAR = $(shell pwd)/rebar3
PKG_VSN = $(shell head -n1 rel/pkg/Makefile | sed 's/[^0-9.p]//g')
REBAR_VSN = $(shell erl -noshell -eval '{ok, F} = file:consult("rebar.config"), [{release, {_, Vsn}, _}] = [O || {relx, [O | _]} <- F], io:format("~s", [Vsn]), init:stop().')
VARS_VSN = $(shell grep 'bugsnag_app_version' rel/vars.config | sed -e 's/.*,//' -e 's/[^0-9.p]//g' -e 's/[.]$$//')
APP_VSN = $(shell grep vsn apps/$(APP)/src/$(APP).app.src | sed 's/[^0-9.p]//g')
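# The version strings extracted above are cross-checked by the test-vsn target
# below; a release is only considered consistent when config.mk, rebar.config,
# rel/vars.config and the .app.src file all agree.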
include config.mk
compile: $(REBAR) .git/hooks/pre-commit
$(REBAR) compile
......@@ -31,13 +32,16 @@ $(REBAR):
cp `which rebar3` $(REBAR)
upgrade: $(REBAR)
$(REBAR) upgrade
make tree
$(REBAR) upgrade
$(MAKE) tree
update: $(REBAR)
$(REBAR) update
tree: $(REBAR)
rebar.lock: rebar.config $(REBAR)
$(REBAR) compile
tree: $(REBAR) rebar.lock
$(REBAR) tree | grep -v '=' | sed 's/ (.*//' > tree
tree-diff: tree
......@@ -46,6 +50,64 @@ tree-diff: tree
update-fifo.mk:
cp _build/default/lib/fifo_utils/priv/fifo.mk .
###
### Packaging
###
uname_S := $(shell sh -c 'uname -s 2>/dev/null || echo not')
uname_V6 := $(shell sh -c 'uname -v 2>/dev/null | cut -c-6 || echo not')
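# SmartOS is detected from the kernel version string: on a Joyent/SmartOS host
# the first six characters of `uname -v` are "joyent".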
ifeq ($(uname_S),Darwin)
PLATFORM = darwin
REBARPROFILE = darwin
export REBARPROFILE
endif
ifeq ($(uname_S),FreeBSD)
PLATFORM = freebsd
REBARPROFILE = freebsd
export REBARPROFILE
endif
ifeq ($(uname_V6),joyent)
PLATFORM = smartos
REBARPROFILE = smartos
export REBARPROFILE
endif
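# "make dist" dispatches to the platform target detected above (freebsd,
# smartos or darwin), which builds the release with the matching rebar profile
# and, where applicable, hands off to the packaging Makefile.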
dist: ${PLATFORM} ;
generic/rel: version_header
$(REBAR) as ${REBARPROFILE} compile
$(REBAR) as ${REBARPROFILE} release
freebsd: ${PLATFORM}/rel
$(MAKE) -C rel/pkgng package
smartos: ${PLATFORM}/rel
$(MAKE) -C rel/pkg package
darwin: ${PLATFORM}/rel
freebsd/rel: generic/rel
smartos/rel: generic/rel
darwin/rel: generic/rel
dist-help:
@echo "FiFo dist tool"
@echo "You are running this on: ${PLATFORM}"
@echo
@echo "Currently supported platforms are: FreeBSD, SmartOS, Darwin/OSX"
@echo
@echo "SmartOS:"
@echo " rebar profile: smartos $(shell if grep profiles -A12 rebar.config | grep smartos > /dev/null; then echo OK; else echo MISSING; fi)"
@echo " packaging makefile: rel/pkg/Makefile $(shell if [ -f rel/pkg/Makefile ]; then echo OK; else echo MISSING; fi)"
@echo "FreeBSD:"
@echo " rebar profile: freebsd $(shell if grep profiles -A12 rebar.config | grep freebsd > /dev/null; then echo OK; else echo MISSING; fi)"
@echo " packaging makefile: rel/pkgng/Makefile $(shell if [ -f rel/pkgng/Makefile ]; then echo OK; else echo MISSING; fi)"
@echo "Darwin:"
@echo " rebar profile: darwin $(shell if grep profiles -A12 rebar.config | grep darwin > /dev/null; then echo OK; else echo MISSING; fi)"
@echo " packaging makefile: - no packaing -"
###
### Docs
......@@ -60,8 +122,8 @@ docs:
build-vsn:
@echo "$(REBAR_VSN)"
vsn:
@echo "## rel/pkg/Makefile"
@echo "$(PKG_VSN)"
@echo "## Config:"
@echo "$(VERSION)"
@echo "## apps/$(APP)/src/$(APP).app.src"
@echo "$(APP_VSN)"
@echo "## rebar.config"
......@@ -70,7 +132,7 @@ vsn:
@echo "$(VARS_VSN)"
test-vsn:
@echo "Testing against package version: $(REBAR_VSN)"
@[ "$(REBAR_VSN)" = "$(APP_VSN)" ] && echo " - App version ok: $(APP_VSN)" || (echo "App version out of date" && false)
@[ "$(REBAR_VSN)" = "$(PKG_VSN)" ] && echo " - Package version ok: $(PKG_VSN)" || (echo "Package version out of date" && false)
@[ "$(REBAR_VSN)" = "$(VARS_VSN)" ] && echo " - Vars version ok: $(VARS_VSN)" || (echo "Vars version out of date" && false)
@echo "Testing against package version: $(VERSION)"
@[ "$(VERSION)" = "$(APP_VSN)" ] && echo " - App version ok: $(APP_VSN)" || (echo "App version out of date" && false)
@[ "$(VERSION)" = "$(REBAR_VSN)" ] && echo " - Rebar version ok: $(REBAR_VSN)" || (echo "Package version out of date" && false)
@[ "$(VERSION)" = "$(VARS_VSN)" ] && echo " - Vars version ok: $(VARS_VSN)" || (echo "Vars version out of date" && false)
......@@ -20,9 +20,9 @@
{jsone, "~>1.2.3"},
{hackney, "~>1.6.0"},
{mmath, "~>0.2.9"},
{dqe_idx, "~>0.3.0"},
{dqe_idx, "~>0.4.0"},
{dp_decoder, "~>0.2.6"},
{dqe_idx_pg, "~>0.4.0"},
{dqe_idx_pg, "~>0.5.0"},
{ddb_connection, "~>0.4.2"}
]}.
......@@ -30,7 +30,7 @@
%% Plugins
%%-------------------------------------------------------------------
{project_plugins, [{rebar3_cuttlefish, "0.14.0"}]}.
{project_plugins, [{rebar3_cuttlefish, "~>0.16.0"}]}.
{plugins, [rebar3_gpb_plugin]}.
%%-------------------------------------------------------------------
......@@ -38,7 +38,7 @@
%%-------------------------------------------------------------------
{cuttlefish,
[{file_name, "dalmatinerpx.conf.example"},
[{file_name, "dpx.conf"},
{schema_discovery, false}]}.
%%-------------------------------------------------------------------
......@@ -46,8 +46,10 @@
%%-------------------------------------------------------------------
{profiles,
[{prod, [{relx, [{dev_mode, false},
{include_erts, true}]}]},
[
{darwin, [{relx, [{dev_mode, false}, {include_erts, true}]}]},
{smartos, [{relx, [{dev_mode, false}, {include_erts, true}]}]},
{freebsd, [{relx, [{dev_mode, false}, {include_erts, true}]}]},
{lint, [{plugins, [rebar3_lint]}]},
{deb, [{relx, [{dev_mode, false},
{overlay_vars, "rel/vars/deb.config"},
......@@ -59,7 +61,7 @@
%%-------------------------------------------------------------------
{relx,
[{release, {dalmatinerpx, "0.3.2p6"},
[{release, {dpx, "0.3.3"},
[ddb_proxy,
{recon, load},
sasl]},
......
......@@ -2,79 +2,79 @@
[{<<"certifi">>,{pkg,<<"certifi">>,<<"1.0.0">>},1},
{<<"cowboy">>,{pkg,<<"cowboy">>,<<"1.1.2">>},0},
{<<"cowlib">>,{pkg,<<"cowlib">>,<<"1.0.2">>},1},
{<<"ddb_client">>,{pkg,<<"ddb_client">>,<<"0.5.5">>},1},
{<<"ddb_connection">>,{pkg,<<"ddb_connection">>,<<"0.4.2">>},0},
{<<"ddb_client">>,{pkg,<<"ddb_client">>,<<"0.5.7">>},1},
{<<"ddb_connection">>,{pkg,<<"ddb_connection">>,<<"0.4.3">>},0},
{<<"dp_decoder">>,{pkg,<<"dp_decoder">>,<<"0.2.16">>},0},
{<<"dproto">>,{pkg,<<"dproto">>,<<"0.5.2">>},2},
{<<"dqe_idx">>,{pkg,<<"dqe_idx">>,<<"0.3.0">>},0},
{<<"dqe_idx_pg">>,{pkg,<<"dqe_idx_pg">>,<<"0.4.3">>},0},
{<<"dproto">>,{pkg,<<"dproto">>,<<"0.5.4">>},2},
{<<"dqe_idx">>,{pkg,<<"dqe_idx">>,<<"0.4.3">>},0},
{<<"dqe_idx_pg">>,{pkg,<<"dqe_idx_pg">>,<<"0.5.7">>},0},
{<<"dynamic_compile">>,{pkg,<<"dynamic_compile">>,<<"1.0.0">>},2},
{<<"epgsql">>,{pkg,<<"epgsql">>,<<"3.3.0">>},2},
{<<"fifo_lager">>,{pkg,<<"fifo_lager">>,<<"0.1.4">>},0},
{<<"fifo_utils">>,{pkg,<<"fifo_utils">>,<<"0.1.37">>},0},
{<<"goldrush">>,{pkg,<<"goldrush">>,<<"0.1.9">>},1},
{<<"fifo_lager">>,{pkg,<<"fifo_lager">>,<<"0.1.8">>},0},
{<<"fifo_utils">>,{pkg,<<"fifo_utils">>,<<"0.1.51">>},0},
{<<"goldrush">>,{pkg,<<"goldrush">>,<<"0.1.9">>},2},
{<<"hackney">>,{pkg,<<"hackney">>,<<"1.6.6">>},0},
{<<"ibrowse">>,{pkg,<<"ibrowse">>,<<"4.4.0">>},2},
{<<"idna">>,{pkg,<<"idna">>,<<"4.0.0">>},1},
{<<"jsone">>,{pkg,<<"jsone">>,<<"1.2.6">>},0},
{<<"jsx">>,{pkg,<<"jsx">>,<<"2.8.0">>},2},
{<<"jsx">>,{pkg,<<"jsx">>,<<"2.8.2">>},2},
{<<"jsxd">>,{pkg,<<"jsxd">>,<<"0.2.4">>},3},
{<<"lager">>,{pkg,<<"lager">>,<<"3.2.4">>},0},
{<<"lager_graylog">>,{pkg,<<"lager_graylog">>,<<"0.1.1">>},1},
{<<"lager">>,{pkg,<<"lager">>,<<"3.2.4">>},1},
{<<"lager_graylog">>,{pkg,<<"lager_graylog">>,<<"0.1.3">>},1},
{<<"lager_logstash_backend">>,
{pkg,<<"lager_logstash_backend">>,<<"0.1.1">>},
{pkg,<<"lager_logstash_backend">>,<<"0.1.3">>},
1},
{<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},1},
{<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.0.2">>},1},
{<<"mmath">>,{pkg,<<"mmath">>,<<"0.2.17">>},0},
{<<"otters">>,{pkg,<<"otters">>,<<"0.2.8">>},1},
{<<"mmath">>,{pkg,<<"mmath">>,<<"0.2.20">>},0},
{<<"otters">>,{pkg,<<"otters">>,<<"0.2.10">>},1},
{<<"pgapp">>,{pkg,<<"pgapp">>,<<"0.0.2">>},1},
{<<"poolboy">>,{pkg,<<"poolboy">>,<<"1.5.1">>},1},
{<<"quickrand">>,{pkg,<<"quickrand">>,<<"1.5.4">>},2},
{<<"quickrand">>,{pkg,<<"quickrand">>,<<"1.7.2">>},1},
{<<"ranch">>,{pkg,<<"ranch">>,<<"1.3.2">>},0},
{<<"recon">>,{pkg,<<"recon">>,<<"2.3.2">>},0},
{<<"snappiest">>,{pkg,<<"snappiest">>,<<"1.2.0">>},0},
{<<"sqlmig">>,{pkg,<<"sqlmig">>,<<"0.1.4">>},1},
{<<"sqlmig">>,{pkg,<<"sqlmig">>,<<"0.1.5">>},1},
{<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.1">>},1},
{<<"trie">>,{pkg,<<"trie">>,<<"1.7.0">>},0},
{<<"uuid">>,{pkg,<<"uuid_erl">>,<<"1.5.4">>},1}]}.
{<<"trie">>,{pkg,<<"trie">>,<<"1.7.2">>},0},
{<<"uuid">>,{pkg,<<"uuid_erl">>,<<"1.7.2">>},1}]}.
[
{pkg_hash,[
{<<"certifi">>, <<"1C787A85B1855BA354F0B8920392C19AA1D06B0EE1362F9141279620A5BE2039">>},
{<<"cowboy">>, <<"61AC29EA970389A88ECA5A65601460162D370A70018AFE6F949A29DCA91F3BB0">>},
{<<"cowlib">>, <<"9D769A1D062C9C3AC753096F868CA121E2730B9A377DE23DEC0F7E08B1DF84EE">>},
{<<"ddb_client">>, <<"86DDDCE3B379DD3B0B425299DCE86CA49AB00DE8B59DE876EF3F1FA780F39F11">>},
{<<"ddb_connection">>, <<"D59222829EE5DFF89690AAD2E26D422C30BB14F1AA5A61A7921BF02D58735DB8">>},
{<<"ddb_client">>, <<"1BC5836D3EB6786778F6DD202B64D4649DAA44C8882FA2001BB22C1CA17C72E1">>},
{<<"ddb_connection">>, <<"B74F010840917CFAF6088391DCC9B91A1036213E4DDF2B58B4F7FBC00AEA01C2">>},
{<<"dp_decoder">>, <<"F08B30D5E53A8F47DD57AB1B48D81519685C9269B8FD59755EBA7E94EBA4F297">>},
{<<"dproto">>, <<"D1C9929353589BD395CAB3E5C6E1CFC4DC8B0660527145E2DD221771D4467ABD">>},
{<<"dqe_idx">>, <<"EDCA91E5130C532D4B33AF678197ADC25AA4FA7471AC75033290F8AD8392874D">>},
{<<"dqe_idx_pg">>, <<"FC95D7A8AB1D54F496AA47633FB793209A59D63800DCE84191804E56997DB01D">>},
{<<"dproto">>, <<"396436EB3593AD0CF2896001ACAFF0B60DA9990078F0367A2682D22E0FEB82EE">>},
{<<"dqe_idx">>, <<"275F45869B1791BE6679D6B4A250C72CF43636CB4B3F2213E5D4AA6F8E0F1CF2">>},
{<<"dqe_idx_pg">>, <<"E6B24FEF5DD8F323FA14F67DB76DE15C0636B579E0CE6C5A3968CAD5EC3C0712">>},
{<<"dynamic_compile">>, <<"8171B2CB4953EA3ED2EF63F5B26ABF677ACD0CA32210C2A08A7A8406A743F76B">>},
{<<"epgsql">>, <<"974A578340E52012CBAB820CE756E7ED1DF1BAF0110C59A6753D8337A2CF9454">>},
{<<"fifo_lager">>, <<"9374233205CFC02F837FEFA28209673D7DAD1BC2ADE8534742951C34B6278817">>},
{<<"fifo_utils">>, <<"34EC0F222281449D7BE9B057DC0D53258DB477DC9A3D33DD1BD1766479E90010">>},
{<<"fifo_lager">>, <<"037C26D77A5E27ECC158889A73CE745111B9291A0DE32CCD0A69E8C1514586A9">>},
{<<"fifo_utils">>, <<"2E1DA92C4C63CB36CF1325FBCF16F20B9781FCBA362CD86CAD300F35628D5E00">>},
{<<"goldrush">>, <<"F06E5D5F1277DA5C413E84D5A2924174182FB108DABB39D5EC548B27424CD106">>},
{<<"hackney">>, <<"5564B4695D48FD87859E9DF77A7FA4B4D284D24519F0CD7CC898F09E8FBDC8A3">>},
{<<"ibrowse">>, <<"2D923325EFE0D2CB09B9C6A047B2835A5EDA69D8A47ED6FF8BC03628B764E991">>},
{<<"idna">>, <<"10AAA9F79D0B12CF0DEF53038547855B91144F1BFCC0EC73494F38BB7B9C4961">>},
{<<"jsone">>, <<"3EED1BC3F34D5727A011AB84A20230F4BDD8BB2B9C07C3AD0DCF412410231A74">>},
{<<"jsx">>, <<"749BEC6D205C694AE1786D62CEA6CC45A390437E24835FD16D12D74F07097727">>},
{<<"jsx">>, <<"7ACC7D785B5ABE8A6E9ADBDE926A24E481F29956DD8B4DF49E3E4E7BCC92A018">>},
{<<"jsxd">>, <<"C14114AFCA463F2D03D3FB6CC81FD51CDA8CA86A47E5AC3ABDF0CA572A73A413">>},
{<<"lager">>, <<"A6DEB74DAE7927F46BD13255268308EF03EB206EC784A94EAF7C1C0F3B811615">>},
{<<"lager_graylog">>, <<"2C3C11A7BCAB6135F0113DF71E26742B8A301AD2D2A67917526CDD20F90898F9">>},
{<<"lager_logstash_backend">>, <<"66405FEC3223C13D94C3112933D4234FC7BF8C7DB77A1C9C162183AB5C044F73">>},
{<<"lager_graylog">>, <<"B81F6CC71198F0CF113E74395A12F9C87E22F7B57F5E484A4802413E2A5B8F2C">>},
{<<"lager_logstash_backend">>, <<"86E7FBE08B34DAF9341E9FC397EADFCD7C1ABA0A0EA901FA7A4C454A3C4C335C">>},
{<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>},
{<<"mimerl">>, <<"993F9B0E084083405ED8252B99460C4F0563E41729AB42D9074FD5E52439BE88">>},
{<<"mmath">>, <<"DFD52637B19F1EEF6B0AAAC473CAC2FC27A7190B6A6420454A917423527F3030">>},
{<<"otters">>, <<"4D28D810E3311E9BC845FBA20EBBE88CFF4AC26320B56A277603DDE134F24DC1">>},
{<<"mmath">>, <<"C07688D60E31C0794E2C50F8FE029747DE24A4E711A294169C69CCB3ACCB1294">>},
{<<"otters">>, <<"31A7B47D50E14B15CAE380D060F666A54A2D79688BCC29D4CCB70E2FE05B0EA8">>},
{<<"pgapp">>, <<"3E104BB777C8455D8B26D1538B67ABE0188EE97B1DF973FD936C2204CB316196">>},
{<<"poolboy">>, <<"6B46163901CFD0A1B43D692657ED9D7E599853B3B21B95AE5AE0A777CF9B6CA8">>},
{<<"quickrand">>, <<"47ADD4755CC5F209CBEFFD6F47C84061196CD7FAD99FD8FD12418EB0D06B939D">>},
{<<"quickrand">>, <<"E856F3C69FEC00D1ACCA8E56CB452B650E838D3A9720811410F439121EFAFE59">>},
{<<"ranch">>, <<"E4965A144DC9FBE70E5C077C65E73C57165416A901BD02EA899CFD95AA890986">>},
{<<"recon">>, <<"4444C879BE323B1B133EEC5241CB84BD3821EA194C740D75617E106BE4744318">>},
{<<"snappiest">>, <<"25706FEBB5ECAEA900D879A89C6D967C8D1BF700F8546BEBD0DEA514A8CCBFB7">>},
{<<"sqlmig">>, <<"73163B3842A81EEBBF78A38D5FA03D2430863AE79DACCF588CD9EFFD48470A6C">>},
{<<"sqlmig">>, <<"8208D222A9335C1B1171F4FD1CE4150CF28B1FDF37CA9A66715AC434ED9B9AF4">>},
{<<"ssl_verify_fun">>, <<"28A4D65B7F59893BC2C7DE786DEC1E1555BD742D336043FE644AE956C3497FBE">>},
{<<"trie">>, <<"E022B7B33338957BCE17BFD52B6BFE0840F2269E89DAB0C00C01C6CD7FCBEECC">>},
{<<"uuid">>, <<"06240EFF3EAF013CC22274689B0C1FF6345645EE5D47A9E4ED7545EEB7698B23">>}]}
{<<"trie">>, <<"CF3779ACF42DE76F8A9B74517E46449D3D31A827ED7A7C287B3A534945966237">>},
{<<"uuid">>, <<"D596C8DD01A4AE48B9D8D51832CCC8F8302BF67ACD01336AEC3FCFAE6B9D2BC2">>}]}
].
No preview for this file type
......@@ -13,7 +13,7 @@ if [[ $(id dalmatinerproxy 2>/dev/null) ]] ;then
chown -R dalmatinerpx:dalmatinerpx /data/dalmatinerpx
fi
CONFFILE=/data/dalmatinerpx/etc/dalmatinerpx.conf
CONFFILE=/data/dalmatinerpx/etc/dpx.conf
if [ ! -f "${CONFFILE}" ]
then
......
#!/bin/sh
#
# $FreeBSD$
#
# PROVIDE: dalmatinerpx
# REQUIRE: LOGIN
# KEYWORD: shutdown
. /etc/rc.subr
name="dalmatinerpx"
rcvar=dalmatinerpx_enable
load_rc_config $name
: ${dalmatinerpx_enable:="NO"}
start_cmd="${name}_start"
stop_cmd="${name}_stop"
dalmatinerpx_start()
{
if checkyesno ${rcvar}; then
echo "* starting dalmatiner db... "
/usr/local/bin/sudo -u dalmatinerpx /usr/local/lib/dpx/bin/dpx start
fi
}
dalmatinerpx_stop()
{
if checkyesno ${rcvar}; then
echo "* stopping dalmatiner db... "
/usr/local/bin/sudo -u dalmatinerpx /usr/local/lib/dpx/bin/dpx stop
fi
}
run_rc_command "$1"
%%
%% etc/vm.args
%%
{run_user, "dalmatinerdb"}.
\ No newline at end of file
VERSION=0.3.2p6
COMPONENT_INTERNAL=dalmatinerpx
COMPONENT=dalmatinerpx
DEPS="erlang" "coreutils" "sudo"
DEPS="erlang" "coreutils" "sudo" "grep" "zlib" "bzip2" "gcc49-libs"
include ../../_build/default/lib/fifo_utils/priv/pkg.mk
include ../../config.mk
include ../../_build/${REBARPROFILE}/lib/fifo_utils/priv/pkg.mk
.PHONY: prepare
.PHONY: package prepare clean
prepare:
-rm -r $(STAGE_DIR)/$(COMPONENT)
cp -r ../../_build/prod/rel/$(COMPONENT_INTERNAL) $(STAGE_DIR)/$(COMPONENT)
cp -r ../../_build/${REBARPROFILE}/rel/$(COMPONENT_INTERNAL) $(STAGE_DIR)/$(COMPONENT)
rm -rf $(STAGE_DIR)/$(COMPONENT)/lib/*/c_src
package: prepare $(FILE).tgz
clean: clean-pkg
-rm *.tgz
......@@ -5,7 +5,7 @@
| |
| Please have a look at the configuration files: |
| |
| * /data/dalmatinerpx/etc/dalmatinerpx.conf |
| * /data/dalmatinerpx/etc/dpx.conf |