fix JSON structure returned by 'daily_logs' endpoint
Refs: OPMONDEV-193
nortaljevgenikr committed Feb 11, 2025
1 parent 82333d0 commit b919cd3
Showing 2 changed files with 33 additions and 15 deletions.
7 changes: 3 additions & 4 deletions opendata_module/opmon_opendata/api/helpers.py
@@ -62,14 +62,13 @@ def generate_ndjson_stream(postgres: PostgreSQL_LogManager, date: datetime,
     buffer_size = settings['opendata'].get('stream-buffer-lines', DEFAULT_STREAM_BUFFER_LINES)
     gzip_file.write(b'[\n')
     for idx, row in enumerate(data_cursor):
+        if idx > 0:
+            gzip_file.write(b',\n')
         json_obj = {column_name: row[column_idx] for column_idx, column_name in enumerate(column_names)}
         # Must manually convert Postgres dates to string to be compatible with JSON format
         for date_column in date_columns:
             json_obj[date_column] = datetime.strftime(json_obj[date_column], '%Y-%m-%d')
         gzip_file.write(bytes(json.dumps(json_obj), 'utf-8'))
-        if idx < len(data_cursor) - 1:
-            gzip_file.write(b',')
-        gzip_file.write(b'\n')
         count += 1
         if count == buffer_size:
             count = 0
@@ -81,7 +80,7 @@ def generate_ndjson_stream(postgres: PostgreSQL_LogManager, date: datetime,
             # Empty buffer to free memory
             gzip_buffer.truncate(0)
             gzip_buffer.seek(0)
-    gzip_file.write(b']')
+    gzip_file.write(b'\n]')
 
     # Final data gets written when GzipFile is closed
     yield gzip_buffer.getvalue()
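Reviewer's note (not part of the commit): the deleted lines decided whether to append a comma by evaluating len(data_cursor) - 1, but the object returned by get_data_cursor is consumed as a single-pass iterable, and database cursors generally do not support len() (psycopg2's cursor, for example, raises TypeError), which plausibly explains the malformed array this commit fixes. Writing the separator before every element after the first, as the added lines do, needs only the running index, and the closing b'\n]' keeps the last object and the bracket on separate lines. A minimal, self-contained sketch of that pattern; stream_json_array is a hypothetical stand-in, not code from this repo:

import json
from typing import Iterable, Iterator


def stream_json_array(rows: Iterable[dict]) -> Iterator[bytes]:
    # Hypothetical helper illustrating the commit's separator-first approach:
    # emit the comma *before* every element except the first, so no len()
    # or look-ahead on the source is ever needed.
    yield b'[\n'
    for idx, row in enumerate(rows):
        if idx > 0:
            yield b',\n'
        yield json.dumps(row).encode('utf-8')
    # Close the array on its own line, mirroring the b'\n]' change above.
    yield b'\n]'


# Works the same for a list and for a one-shot generator; an empty source
# yields b'[\n\n]', which json.loads still accepts as an empty array.
print(b''.join(stream_json_array(iter([{'a': 1}, {'b': 2}]))).decode())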
41 changes: 30 additions & 11 deletions opendata_module/opmon_opendata/tests/test_helpers.py
@@ -24,6 +24,7 @@
 import gzip
 import json
 import io
+import pytest
 
 from datetime import date, datetime
 from opmon_opendata.api import helpers
@@ -34,20 +35,38 @@
 }
 
 
-def test_generate_ndjson_stream_creates_correct_json(mocker):
-    postgres = mocker.Mock()
-    postgres.get_column_names_and_types = lambda: [('foo', 'integer'), ('bar', 'character varying'), ('baz', 'date')]
-    postgres.get_data_cursor = lambda *args, **kwargs: [
-        ('1', 'aaa', date(2025, 1, 1)),
-        ('2', 'bbb', date(2025, 2, 2))
-    ]
+@pytest.fixture
+def postgres(mocker):
+    return mocker.Mock()
+
+
+@pytest.mark.parametrize(
+    "column_types, data_cursor, expected_output",
+    [
+        # Test case: Multiple results
+        (
+            [('foo', 'integer'), ('bar', 'character varying'), ('baz', 'date')],
+            [('1', 'aaa', date(2025, 1, 1)), ('2', 'bbb', date(2025, 2, 2))],
+            [{'foo': '1', 'bar': 'aaa', 'baz': '2025-01-01'},
+             {'foo': '2', 'bar': 'bbb', 'baz': '2025-02-02'}]
+        ),
+        # Test case: No results
+        (
+            [('foo', 'integer')],
+            [],
+            []
+        )
+    ]
+)
+def test_generate_ndjson_stream_creates_correct_json(postgres, column_types, data_cursor, expected_output):
+    postgres.get_column_names_and_types.return_value = column_types
+    postgres.get_data_cursor.return_value = data_cursor
     gzipped_file_stream = helpers.generate_ndjson_stream(postgres, datetime.now(), [], [], [], SETTINGS)
     decompressed_data = decompress_gzip(gzipped_file_stream)
+    assert json.loads(decompressed_data) == expected_output
+
 def decompress_gzip(gzipped_file_stream):
     compressed_data = b"".join(gzipped_file_stream)
     with gzip.GzipFile(fileobj=io.BytesIO(compressed_data), mode="rb") as f:
         decompressed_data = f.read().decode()
-    assert json.loads(decompressed_data) == [
-        {'foo': '1', 'bar': 'aaa', 'baz': '2025-01-01'},
-        {'foo': '2', 'bar': 'bbb', 'baz': '2025-02-02'}
-    ]
+    return decompressed_data
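A note on the new test layout (my sketch, not from the repo): the postgres fixture and @pytest.mark.parametrize compose cleanly because pytest resolves each test argument independently, postgres from the fixture (which itself uses pytest-mock's mocker), the other three from the parametrize list. Configuring the mock through .return_value, rather than overwriting methods with lambdas as the old test did, keeps each attribute a real Mock, so call-tracking assertions such as assert_called_once_with remain available. A reduced illustration of the same pattern, with hypothetical names:

import pytest


@pytest.fixture
def service(mocker):
    # 'mocker' comes from the pytest-mock plugin; a fresh Mock per test run
    return mocker.Mock()


@pytest.mark.parametrize("value, expected", [(1, 1), (2, 2)])
def test_service_echo(service, value, expected):
    service.echo.return_value = value   # configure instead of replacing the method
    assert service.echo() == expected
    service.echo.assert_called_once_with()   # still a Mock, so call tracking works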
