Checkpoint, updates for Nim 2.0.

FossilOrigin-Name: be37ceda30c70ca3480cc04749238e5f78e719f86f7e30ad7ed2429a93f77d36
Mahlon E. Smith 2023-10-13 20:21:03 +00:00
parent 138bca5b41
commit 673a9a0353
3 changed files with 10 additions and 17 deletions

View file

@@ -9,6 +9,6 @@ bin = @["sieb"]
 # Dependencies
-requires "nim >= 1.5.0"
-requires "yaml >= 1.1"
+requires "nim >= 2.0.0"
+requires "yaml >= 2.0"

View file

@@ -13,8 +13,7 @@ import
 std/os,
 std/streams,
 std/tables,
-yaml/parser,
-yaml/serialization
+yaml/loading
 import util
@@ -57,7 +56,7 @@ proc parse( path: string ): Config =
 let stream = newFileStream( path )
 try:
 stream.load( result )
-except YamlParserError as err:
+except YamlLoadingError as err:
 debug err.msg
 return Config() # return empty default, it could be "half parsed"
 except YamlConstructionError as err:
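
For context, a minimal sketch of the NimYAML >= 2.0 loading pattern the two hunks above migrate to: the stream load overload now comes from yaml/loading, and YamlLoadingError is the common ancestor of the parser and construction errors, so a single handler can cover both failure modes. The Config fields and proc name below are illustrative, not sieb's actual schema.

    import std/streams
    import yaml/loading

    type Config = object
        # Illustrative fields only; sieb's real Config lives elsewhere.
        maildir: string
        loglevel: string

    proc parseConfig( path: string ): Config =
        ## Load a YAML config file, falling back to an empty default on
        ## any parse or construction failure (it could be "half parsed").
        let stream = newFileStream( path )
        if stream.isNil: return Config()
        defer: stream.close
        try:
            stream.load( result )
        except YamlLoadingError as err:
            echo err.msg
            return Config()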

View file

@@ -14,6 +14,7 @@ import
 std/osproc,
 std/posix,
 std/re,
+std/selectors,
 std/streams,
 std/strformat,
 std/strutils,
@@ -105,7 +106,7 @@ proc newMessage*( dir: Maildir ): Message =
 let now = getTime()
 var hostname = newString(256)
-discard getHostname( hostname, 256 )
+discard getHostname( cstring(hostname), 256 )
 hostname.setLen( cstring(hostname).len )
 msgcount = msgcount + 1
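
For context on the getHostname change just above: Nim 2.0 warns about implicitly converting a mutable string to cstring, so the buffer is now converted explicitly; the following setLen call then trims the Nim string down to the NUL-terminated length the C call actually wrote. A standalone sketch of the same pattern (the proc name is illustrative):

    import std/posix

    proc currentHostname(): string =
        ## Fill a fixed-size buffer via POSIX gethostname(), then shrink
        ## the Nim string to the length of the C string inside it.
        result = newString( 256 )
        discard getHostname( cstring(result), 256 )
        result.setLen( cstring(result).len )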
@@ -179,21 +180,14 @@ proc filter*( orig_msg: Message, cmd: seq[string] ): Message =
# Read from the original message, write to the filter
# process in chunks.
#
"*** STARTING PIPE TO PROCESS".debug
var t = 0
# FIXME: I think I'm a victim of the kernel buffer filling up
# with large messages. There are numerous posts regarding this.
# Might need to go lower level than "streams" with the child process.
#
orig_msg.open
while not orig_msg.stream.atEnd:
buf = orig_msg.stream.readStr( BUFSIZE )
t = t + BUFSIZE
process.inputStream.write( buf )
process.inputStream.flush
echo "wrote ", t
"*** DONE PIPING TO PROCESS".debug # FIXME: hangs on large messages
# FIXME: Need to start reading from process with
# the selector when it becomes readable
# Read from the filter process until EOF, send to the
# new message in chunks.
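
The two FIXMEs above describe the classic pipe deadlock: once the kernel's pipe buffer fills, the parent blocks writing to the filter's stdin because nothing is draining the filter's stdout. One possible shape for the selector-based fix hinted at by the new std/selectors import, sketched here with illustrative names (pipeFilter, readChunk, and a BUFSIZE of 8192) rather than sieb's actual API, and using raw posix reads on the filter's output, per the FIXME's note about going lower level than streams, so a short read returns immediately instead of blocking to fill a buffer:

    import std/[osproc, posix, selectors, streams]

    const BUFSIZE = 8192

    proc readChunk( fd: cint ): string =
        ## One read() of up to BUFSIZE bytes from a file descriptor,
        ## returning "" on EOF or error.
        result = newString( BUFSIZE )
        let n = posix.read( fd, addr result[0], BUFSIZE )
        result.setLen( max(n, 0) )

    proc pipeFilter( input: Stream, cmd: string, args: seq[string] ): string =
        ## Feed input to an external filter command, draining the filter's
        ## output whenever it becomes readable so neither end of the pipe
        ## blocks on a full kernel buffer.
        let process  = startProcess( cmd, args = args, options = {poUsePath} )
        let outFd    = cint( process.outputHandle )
        var selector = newSelector[int]()
        selector.registerHandle( outFd.int, {Event.Read}, 0 )

        while not input.atEnd:
            process.inputStream.write( input.readStr(BUFSIZE) )
            process.inputStream.flush
            # Drain whatever the filter has produced so far before writing more.
            while selector.select( 0 ).len > 0:
                let chunk = readChunk( outFd )
                if chunk.len == 0: break
                result.add( chunk )

        process.inputStream.close  # signal EOF to the filter
        # Collect the rest of the output until the filter closes its stdout.
        while true:
            let chunk = readChunk( outFd )
            if chunk.len == 0: break
            result.add( chunk )

        selector.close
        discard process.waitForExit
        process.close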