Updates for nim 0.19.
parent d3e020362c
commit 7e44181f45
1 changed file with 9 additions and 9 deletions
@@ -35,7 +35,7 @@ import
     nativesockets,
     net,
     os,
-    parseopt2,
+    parseopt,
     posix,
     strutils,
     tables,

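Nim 0.19 deprecated the parseopt2 module, hence the switch to plain parseopt above. A minimal sketch of the replacement API, assuming roughly the flags from the USAGE string below; the handler bodies are illustrative only, not the relay's real option handling:

    import parseopt

    var p = init_opt_parser()
    for kind, key, val in p.getopt():
        case kind
        of cmd_long_option, cmd_short_option:
            case key
            of "q": echo "quiet"                          # illustrative handlers only
            of "listen-port": echo "port: ", val
            of "dbopts": echo "db connection string: ", val
            else: discard
        of cmd_argument, cmd_end:
            discard
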
@@ -44,7 +44,7 @@ import


const
-    VERSION = "v0.1.1"
+    VERSION = "v0.2.0"
     USAGE = """
 ./netdata_tsrelay [-q][-v][-h] --dbopts="[PostgreSQL connection string]" --listen-port=14866 --listen-addr=0.0.0.0

@@ -94,13 +94,13 @@ proc hl( msg: string, fg: ForegroundColor, bright=false ): string =
 proc fetch_data( client: Socket ): string =
     ## Netdata JSON backend doesn't send a length, so we read line by
     ## line and wait for stream timeout to determine a "sample".
-    var buf: string = nil
+    var buf: string = ""
     try:
         result = client.recv_line( timeout=conf.timeout )
-        if result != "" and not result.is_nil: result = result & "\n"
+        if result != "": result = result & "\n"
         while buf != "":
             buf = client.recv_line( timeout=conf.timeout )
-            if buf != "" and not buf.is_nil: result = result & buf & "\n"
+            if buf != "": result = result & buf & "\n"
     except TimeoutError:
         discard

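The dropped is_nil checks follow from another 0.19 change: strings can no longer be nil, and a fresh string starts out as "". A tiny illustration of that behaviour (nothing here is from the relay itself):

    var buf: string              # default-initialized to "" in Nim 0.19; nil strings no longer exist
    assert buf == ""
    buf = buf & "sample line\n"  # appending/concatenating needs no nil guard
    assert buf != ""
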
@@ -109,13 +109,13 @@ proc parse_data( data: string ): seq[ JsonNode ] =
     ## Given a raw +data+ string, parse JSON and return a sequence
     ## of JSON samples. Netdata can buffer multiple samples in one batch.
     result = @[]
-    if data == "" or data.is_nil: return
+    if data == "": return

     # Hash of sample timeperiods to pivoted json data
     var pivoted_data = init_table[ BiggestInt, JsonNode ]()

     for sample in split_lines( data ):
-        if sample == "" or sample.is_nil: continue
+        if sample == "": continue
         if conf.debug: echo sample.hl( fgBlack, bright=true )

         var parsed: JsonNode

@@ -130,7 +130,7 @@ proc parse_data( data: string ): seq[ JsonNode ] =
         #
         var pivot: JsonNode
         try:
-            let key = parsed[ "timestamp" ].get_num
+            let key = parsed[ "timestamp" ].get_int

             if pivoted_data.has_key( key ):
                 pivot = pivoted_data[ key ]

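get_num was removed from the json module in 0.19; get_int (and get_biggest_int for 64-bit values) replace it, which is what this hunk and the next one switch to. A small standalone sketch with a made-up payload whose field names mirror the netdata samples handled above:

    import json

    let sample = parse_json( """{ "timestamp": 1540000000, "hostname": "Example-Host" }""" )
    let ts = sample[ "timestamp" ].get_int    # get_num is gone; get_int / get_biggest_int replace it
    echo sample[ "hostname" ].get_str, " @ ", ts
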
@@ -159,7 +159,7 @@ proc write_to_database( samples: seq[ JsonNode ] ): void =
     db.exec sql( "BEGIN" )
     for sample in samples:
         var
-            timestamp = sample[ "timestamp" ].get_num
+            timestamp = sample[ "timestamp" ].get_int
             host = sample[ "hostname" ].get_str.to_lowerascii
         sample.delete( "timestamp" )
         sample.delete( "hostname" )