mirror of https://github.com/clearml/wexpect-venv (synced 2025-06-26 18:15:52 +00:00)
[FIX] logging, now all logs generated into .wlog dir; [ADD] searcher_string class
This commit is contained in:
parent 46a206ae30
commit 3ea1346012
@@ -82,9 +82,11 @@ def init_logger():
             logger_filename = os.environ['WEXPECT_LOGGER_FILENAME']
         except KeyError:
             pid = os.getpid()
-            logger_filename = f'wexpect_{pid}'
+            logger_filename = f'./.wlog/wexpect_{pid}'
         logger.setLevel(logger_level)
-        fh = logging.FileHandler(f'{logger_filename}.log', 'w', 'utf-8')
+        logger_filename = f'{logger_filename}.log'
+        os.makedirs(os.path.dirname(logger_filename), exist_ok=True)
+        fh = logging.FileHandler(logger_filename, 'w', 'utf-8')
         formatter = logging.Formatter('%(asctime)s - %(filename)s::%(funcName)s - %(levelname)s - %(message)s')
         fh.setFormatter(formatter)
         logger.addHandler(fh)
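For reference, the patched behaviour is easy to exercise in isolation: the log file name is taken from the WEXPECT_LOGGER_FILENAME environment variable when present, otherwise it defaults to ./.wlog/wexpect_<pid>, and the target directory is created before the FileHandler opens the file. Below is a minimal standalone sketch of that flow, not the real init_logger(); the logger name 'wexpect_demo' and the guard around os.path.dirname() (which is empty for a bare file name) are illustrative additions.

    import logging
    import os

    def init_demo_logger():
        # Same file-name selection as the patch: env override, else ./.wlog/<pid>.
        try:
            logger_filename = os.environ['WEXPECT_LOGGER_FILENAME']
        except KeyError:
            logger_filename = f'./.wlog/wexpect_{os.getpid()}'
        logger_filename = f'{logger_filename}.log'

        # Create the directory before logging.FileHandler opens the file.
        # The dirname guard is an addition for this sketch: os.path.dirname()
        # returns '' for a bare file name, and os.makedirs('') raises.
        dirname = os.path.dirname(logger_filename)
        if dirname:
            os.makedirs(dirname, exist_ok=True)

        logger = logging.getLogger('wexpect_demo')   # illustrative logger name
        logger.setLevel(logging.DEBUG)
        fh = logging.FileHandler(logger_filename, 'w', 'utf-8')
        fh.setFormatter(logging.Formatter(
            '%(asctime)s - %(filename)s::%(funcName)s - %(levelname)s - %(message)s'))
        logger.addHandler(fh)
        return logger

Run with no environment variables set, this leaves a wexpect_<pid>.log file under ./.wlog/, matching the new default.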
@@ -142,10 +144,6 @@ class ConsoleReaderBase:
             except Exception as e:
                 logger.info(e)
                 return
 
-            time.sleep(.2)
-            self.write('ls')
-            self.write(os.linesep)
-
             paused = False
 
@@ -186,7 +184,7 @@ class ConsoleReaderBase:
         finally:
             time.sleep(.1)
             self.send_to_host(self.readConsoleToCursor())
-            time.sleep(1)
+            time.sleep(.1)
             self.close_connection()
 
     def write(self, s):
@@ -483,4 +481,4 @@ class ConsoleReaderPipe(ConsoleReaderBase):
         ret = resp[1]
         return ret
-
-
+
+
@@ -983,6 +983,103 @@ class searcher_re (object):
         self.match = the_match
         self.end = self.match.end()
         return best_index
+
+
+class searcher_string (object):
+
+    """This is a plain string search helper for the spawn.expect_any() method.
+
+    Attributes:
+
+        eof_index     - index of EOF, or -1
+        timeout_index - index of TIMEOUT, or -1
+
+    After a successful match by the search() method the following attributes
+    are available:
+
+        start - index into the buffer, first byte of match
+        end   - index into the buffer, first byte after match
+        match - the matching string itself
+    """
+
+    def __init__(self, strings):
+
+        """This creates an instance of searcher_string. This argument 'strings'
+        may be a list; a sequence of strings; or the EOF or TIMEOUT types. """
+
+        self.eof_index = -1
+        self.timeout_index = -1
+        self._strings = []
+        for n, s in zip(list(range(len(strings))), strings):
+            if s is EOF:
+                self.eof_index = n
+                continue
+            if s is TIMEOUT:
+                self.timeout_index = n
+                continue
+            self._strings.append((n, s))
+
+    def __str__(self):
+
+        """This returns a human-readable string that represents the state of
+        the object."""
+
+        ss = [(ns[0], '    %d: "%s"' % ns) for ns in self._strings]
+        ss.append((-1, 'searcher_string:'))
+        if self.eof_index >= 0:
+            ss.append((self.eof_index, '    %d: EOF' % self.eof_index))
+        if self.timeout_index >= 0:
+            ss.append((self.timeout_index, '    %d: TIMEOUT' % self.timeout_index))
+        ss.sort()
+        ss = list(zip(*ss))[1]
+        return '\n'.join(ss)
+
+    def search(self, buffer, freshlen, searchwindowsize=None):
+
+        """This searches 'buffer' for the first occurence of one of the search
+        strings. 'freshlen' must indicate the number of bytes at the end of
+        'buffer' which have not been searched before. It helps to avoid
+        searching the same, possibly big, buffer over and over again.
+
+        See class spawn for the 'searchwindowsize' argument.
+
+        If there is a match this returns the index of that string, and sets
+        'start', 'end' and 'match'. Otherwise, this returns -1. """
+
+        absurd_match = len(buffer)
+        first_match = absurd_match
+
+        # 'freshlen' helps a lot here. Further optimizations could
+        # possibly include:
+        #
+        # using something like the Boyer-Moore Fast String Searching
+        # Algorithm; pre-compiling the search through a list of
+        # strings into something that can scan the input once to
+        # search for all N strings; realize that if we search for
+        # ['bar', 'baz'] and the input is '...foo' we need not bother
+        # rescanning until we've read three more bytes.
+        #
+        # Sadly, I don't know enough about this interesting topic. /grahn
+
+        for index, s in self._strings:
+            if searchwindowsize is None:
+                # the match, if any, can only be in the fresh data,
+                # or at the very end of the old data
+                offset = -(freshlen+len(s))
+            else:
+                # better obey searchwindowsize
+                offset = -searchwindowsize
+            n = buffer.find(s, offset)
+            if n >= 0 and n < first_match:
+                first_match = n
+                best_index, best_match = index, s
+        if first_match == absurd_match:
+            return -1
+        self.match = best_match
+        self.start = first_match
+        self.end = self.start + len(self.match)
+        return best_index
+
 
 
 
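The added class mirrors pexpect's plain-string searcher: search() only rescans the window that can contain a new hit (the last freshlen bytes plus the length of each needle, or searchwindowsize when given) and returns the index of the string whose match starts earliest in the buffer, recording start, end and match. A small usage sketch follows; it drives the searcher directly with an invented buffer rather than through the spawn/expect machinery, and the import path is an assumption about where this commit exposes the class.

    from wexpect import searcher_string   # import path assumed for this sketch

    searcher = searcher_string(['error:', 'login:'])

    buffer = ''
    for chunk in ['Welcome\r\n', 'Ubuntu log', 'in: ']:
        old_len = len(buffer)
        buffer += chunk
        # freshlen tells search() how many bytes arrived since the last call,
        # so only the tail of the buffer needs to be rescanned.
        index = searcher.search(buffer, len(buffer) - old_len)
        if index != -1:
            print(index)                                # 1 -> matched 'login:'
            print(buffer[searcher.start:searcher.end])  # 'login:'
            break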