Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 7 additions & 2 deletions snuba/datasets/processors/search_issues_processor.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import numbers
import uuid
from datetime import datetime
from datetime import datetime, timezone
from typing import (
Any,
Dict,
Expand Down Expand Up @@ -228,7 +228,12 @@ def _process_transaction_duration(
def _process_timestamp_ms(
self, event_data: IssueEventData, processed: MutableMapping[str, Any]
) -> None:
processed["timestamp_ms"] = processed["client_timestamp"]
client_timestamp = processed["client_timestamp"]
# NOTE: we do this conversion because the JSONRowEncoder will strip out milliseconds out
# of datetime objects specifically. To work around that, we convert the datetime to a
# timestamp in milliseconds
client_timestamp = client_timestamp.replace(tzinfo=timezone.utc)
processed["timestamp_ms"] = int(client_timestamp.timestamp() * 1000)

def process_insert_v1(
self, event: SearchIssueEvent, metadata: KafkaMessageMetadata
Expand Down
10 changes: 7 additions & 3 deletions tests/datasets/test_search_issues_processor.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import copy
import uuid
from collections import OrderedDict
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from typing import Any, MutableMapping, Union

import pytest
def test_extract_timestamp_ms(self, message_base):
    """timestamp_ms must equal the client timestamp, interpreted as UTC,
    expressed as an integer number of milliseconds since the epoch."""
    processed = self.process_message(message_base)
    self.assert_required_columns(processed)
    insert_row = processed.rows[0]
    # Mirror the processor's conversion: treat the naive client timestamp
    # as UTC, then compare against the integer millisecond value.
    client_timestamp_utc = insert_row["client_timestamp"].replace(
        tzinfo=timezone.utc
    )
    assert insert_row["timestamp_ms"] == int(
        client_timestamp_utc.timestamp() * 1000
    )

def test_extract_user(self, message_base):
message_with_user = message_base
Expand Down
27 changes: 27 additions & 0 deletions tests/test_search_issues_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -479,3 +479,30 @@ def test_eventstream_query_message(self) -> None:
assert response.status_code == 200, data
assert data["stats"]["consistent"]
assert data["data"] == [{"project_id": 1, "message": message}]

def test_eventstream_timestamp_ms_precision(self) -> None:
    """End-to-end check that millisecond precision survives the eventstream.

    Inserts an event whose timestamp carries a non-zero millisecond
    component, queries timestamp_ms back over a wide time window, and
    verifies the round-tripped value matches the original exactly.
    """
    base_time = datetime.utcnow()
    # Zero out microseconds, then add a distinct millisecond component so
    # any precision loss in the pipeline is detectable.
    event_time = base_time.replace(microsecond=0) + timedelta(milliseconds=123)

    payload = base_insert_event(event_time)
    payload[2]["datetime"] = event_time.isoformat() + "Z"

    insert_response = self.app.post(
        "/tests/search_issues/eventstream", data=json.dumps(payload)
    )
    assert insert_response.status_code == 200

    window_start = (base_time - timedelta(days=1)).isoformat()
    window_end = (base_time + timedelta(days=1)).isoformat()
    query_response = self.post_query(
        f"""
        MATCH (search_issues)
        SELECT timestamp_ms
        WHERE project_id = 1
        AND timestamp >= toDateTime('{window_start}') AND timestamp < toDateTime('{window_end}')
        """
    )
    result = json.loads(query_response.data)

    assert datetime.fromisoformat(result["data"][0]["timestamp_ms"]) == event_time
Loading