Skip to content

Commit f273708

Browse files
committed
Python 3.x compatibility for test_internals_v2; removed record_writer recordings test code due to non-portability
1 parent 1a1faf0 commit f273708

File tree

1 file changed

+12
-79
lines changed

1 file changed

+12
-79
lines changed

tests/searchcommands/test_internals_v2.py

Lines changed: 12 additions & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -26,14 +26,14 @@
2626
except ImportError:
2727
from splunklib.ordereddict import OrderedDict
2828
from collections import namedtuple, deque
29-
from splunklib.six.moves import StringIO as StringIO
29+
from splunklib.six import BytesIO as BytesIO
3030
from functools import wraps
3131
from glob import iglob
3232
from itertools import chain
3333
from splunklib.six.moves import filter as ifilter
3434
from splunklib.six.moves import map as imap
3535
from splunklib.six.moves import zip as izip
36-
from sys import float_info, maxunicode
36+
from sys import float_info
3737
from tempfile import mktemp
3838
from time import time
3939
from types import MethodType
@@ -61,6 +61,9 @@
6161

6262
max_length = 1 * 1024
6363

64+
# generate only non-wide Unicode characters, as in Python 2, to prevent surrogate values
65+
MAX_NARROW_UNICODE = 0xD800 - 1
66+
6467

6568
def random_bytes():
6669
return os.urandom(random.randint(0, max_length))
@@ -98,7 +101,7 @@ def random_list(population, *args):
98101

99102

100103
def random_unicode():
101-
return ''.join(imap(lambda x: six.unichr(x), random.sample(range(maxunicode), random.randint(0, max_length))))
104+
return ''.join(imap(lambda x: six.unichr(x), random.sample(range(MAX_NARROW_UNICODE), random.randint(0, max_length))))
102105

103106
# endregion
104107

@@ -132,15 +135,15 @@ def test_recorder(self):
132135

133136
with gzip.open(recording + 'input.gz', 'rb') as file_1:
134137
with io.open(recording + 'output', 'rb') as file_2:
135-
ifile = StringIO(file_1.read())
136-
result = StringIO(file_2.read())
138+
ifile = BytesIO(file_1.read())
139+
result = BytesIO(file_2.read())
137140

138141
# Set up the input/output recorders that are under test
139142

140143
ifile = Recorder(mktemp(), ifile)
141144

142145
try:
143-
ofile = Recorder(mktemp(), StringIO())
146+
ofile = Recorder(mktemp(), BytesIO())
144147

145148
try:
146149
# Read and then write a line
@@ -181,7 +184,7 @@ def test_record_writer_with_random_data(self, save_recording=False):
181184
# RecordWriter writes apps in units of maxresultrows records. Default: 50,000.
182185
# Partial results are written when the record count reaches maxresultrows.
183186

184-
writer = RecordWriterV2(StringIO(), maxresultrows=10) # small for the purposes of this unit test
187+
writer = RecordWriterV2(BytesIO(), maxresultrows=10) # small for the purposes of this unit test
185188
test_data = OrderedDict()
186189

187190
fieldnames = ['_serial', '_time', 'random_bytes', 'random_dict', 'random_integers', 'random_unicode']
@@ -260,76 +263,6 @@ def test_record_writer_with_random_data(self, save_recording=False):
260263
# P2 [ ] TODO: Verify that RecordWriter gives consumers the ability to finish early by calling
261264
# RecordWriter.flush(finish=True).
262265

263-
if save_recording:
264-
265-
cls = self.__class__
266-
method = cls.test_record_writer_with_recordings
267-
base_path = os.path.join(self._recordings_path, '.'.join((cls.__name__, method.__name__, six.text_type(time()))))
268-
269-
with gzip.open(base_path + '.input.gz', 'wb') as f:
270-
pickle.dump(test_data, f)
271-
272-
with open(base_path + '.output', 'wb') as f:
273-
f.write(writer._ofile.getvalue())
274-
275-
return
276-
277-
def test_record_writer_with_recordings(self):
278-
279-
cls = self.__class__
280-
method = cls.test_record_writer_with_recordings
281-
base_path = os.path.join(self._recordings_path, '.'.join((cls.__name__, method.__name__)))
282-
283-
for input_file in iglob(base_path + '*.input.gz'):
284-
285-
with gzip.open(input_file, 'rb') as ifile:
286-
test_data = pickle.load(ifile)
287-
288-
writer = RecordWriterV2(StringIO(), maxresultrows=10) # small for the purposes of this unit test
289-
write_record = writer.write_record
290-
fieldnames = test_data['fieldnames']
291-
292-
for values in test_data['values']:
293-
record = OrderedDict(izip(fieldnames, values))
294-
try:
295-
write_record(record)
296-
except Exception as error:
297-
self.fail(error)
298-
299-
for message_type, message_text in test_data['messages']:
300-
writer.write_message(message_type, '{}', message_text)
301-
302-
for name, metric in six.iteritems(test_data['metrics']):
303-
writer.write_metric(name, metric)
304-
305-
writer.flush(finished=True)
306-
307-
# Read expected data
308-
309-
expected_path = os.path.splitext(os.path.splitext(input_file)[0])[0] + '.output'
310-
311-
with io.open(expected_path, 'rb') as ifile:
312-
expected = ifile.read()
313-
314-
expected = self._load_chunks(StringIO(expected))
315-
316-
# Read observed data
317-
318-
ifile = writer._ofile
319-
ifile.seek(0)
320-
321-
observed = self._load_chunks(ifile)
322-
323-
# Write observed data (as an aid to diagnostics)
324-
325-
observed_path = expected_path + '.observed'
326-
observed_value = ifile.getvalue()
327-
328-
with io.open(observed_path, 'wb') as ifile:
329-
ifile.write(observed_value)
330-
331-
self._compare_chunks(observed, expected)
332-
333266
return
334267

335268
def _compare_chunks(self, chunks_1, chunks_2):
@@ -422,7 +355,7 @@ def playback(self, path):
422355
with open(path, 'rb') as f:
423356
test_data = pickle.load(f)
424357

425-
self._output = StringIO()
358+
self._output = BytesIO()
426359
self._recording = test_data['inputs']
427360
self._recording_part = self._recording.popleft()
428361

@@ -444,7 +377,7 @@ def stop(self):
444377

445378
def record(self, path):
446379

447-
self._output = StringIO()
380+
self._output = BytesIO()
448381
self._recording = deque()
449382
self._recording_part = OrderedDict()
450383
self._recording.append(self._recording_part)

0 commit comments

Comments (0)