diff --git a/tests/acceptance/test_explore_logs.py b/tests/acceptance/test_explore_logs.py
index 6ce7105bf96f92..890da53708123d 100644
--- a/tests/acceptance/test_explore_logs.py
+++ b/tests/acceptance/test_explore_logs.py
@@ -8,6 +8,10 @@
 
 FEATURE_FLAGS = [
     "organizations:ourlogs-enabled",
+    "organizations:ourlogs-visualize-sidebar",
+    "organizations:ourlogs-dashboards",
+    "organizations:ourlogs-alerts",
+    "organizations:ourlogs-infinite-scroll",
 ]
 
 
diff --git a/tests/snuba/api/endpoints/test_organization_events_ourlogs.py b/tests/snuba/api/endpoints/test_organization_events_ourlogs.py
index f04a2da510624f..70eca8b6f18f41 100644
--- a/tests/snuba/api/endpoints/test_organization_events_ourlogs.py
+++ b/tests/snuba/api/endpoints/test_organization_events_ourlogs.py
@@ -330,3 +330,66 @@ def test_pagelimit(self):
         )
         assert response.status_code == 400
         assert response.data["detail"] == "Invalid per_page value. Must be between 1 and 9999."
+
+    def test_homepage_query(self):
+        """This query matches the one made on the logs homepage so that we can be sure everything is working at least
+        for the initial load"""
+        logs = [
+            self.create_ourlog(
+                {"body": "foo"},
+                attributes={"sentry.observed_timestamp_nanos": str(self.ten_mins_ago.timestamp())},
+                timestamp=self.ten_mins_ago,
+            ),
+            self.create_ourlog(
+                {"body": "bar"},
+                attributes={"sentry.observed_timestamp_nanos": str(self.nine_mins_ago.timestamp())},
+                timestamp=self.nine_mins_ago,
+            ),
+        ]
+        self.store_ourlogs(logs)
+        response = self.do_request(
+            {
+                "cursor": "",
+                "dataset": "ourlogs",
+                "field": [
+                    "sentry.item_id",
+                    "project.id",
+                    "trace",
+                    "severity_number",
+                    "severity",
+                    "timestamp",
+                    "tags[sentry.timestamp_precise,number]",
+                    "sentry.observed_timestamp_nanos",
+                    "message",
+                ],
+                "per_page": 1000,
+                "project": self.project.id,
+                "query": "",
+                "referrer": "api.explore.logs-table",
+                "sort": "-timestamp",
+                "statsPeriod": "14d",
+            }
+        )
+        assert response.status_code == 200, response.content
+        data = response.data["data"]
+        meta = response.data["meta"]
+        assert len(data) == 2
+        for result, source in zip(data, reversed(logs)):
+            assert result == {
+                "sentry.item_id": UUID(bytes=bytes(reversed(source.item_id))).hex,
+                "project.id": self.project.id,
+                "trace": source.trace_id,
+                "severity_number": source.attributes["sentry.severity_number"].int_value,
+                "severity": source.attributes["sentry.severity_text"].string_value,
+                "timestamp": datetime.fromtimestamp(source.timestamp.seconds)
+                .replace(tzinfo=timezone.utc)
+                .isoformat(),
+                "tags[sentry.timestamp_precise,number]": pytest.approx(
+                    source.attributes["sentry.timestamp_precise"].int_value
+                ),
+                "sentry.observed_timestamp_nanos": source.attributes[
+                    "sentry.observed_timestamp_nanos"
+                ].string_value,
+                "message": source.attributes["sentry.body"].string_value,
+            }
+        assert meta["dataset"] == self.dataset
diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_ourlogs.py b/tests/snuba/api/endpoints/test_organization_events_stats_ourlogs.py
new file mode 100644
index 00000000000000..84043e479cfdfb
--- /dev/null
+++ b/tests/snuba/api/endpoints/test_organization_events_stats_ourlogs.py
@@ -0,0 +1,99 @@
+from datetime import timedelta
+
+from django.urls import reverse
+
+from sentry.testutils.helpers.datetime import before_now
+from tests.snuba.api.endpoints.test_organization_events import OrganizationEventsEndpointTestBase
+
+
+class OrganizationEventsStatsOurlogsEndpointTest(OrganizationEventsEndpointTestBase):
+    endpoint = "sentry-api-0-organization-events-stats"
+
+    def setUp(self):
+        super().setUp()
+        self.login_as(user=self.user)
+        self.start = self.day_ago = before_now(days=1).replace(
+            hour=10, minute=0, second=0, microsecond=0
+        )
+        self.end = self.start + timedelta(hours=6)
+        self.two_days_ago = self.day_ago - timedelta(days=1)
+
+        self.url = reverse(
+            self.endpoint,
+            kwargs={"organization_id_or_slug": self.project.organization.slug},
+        )
+
+    def _do_request(self, data, url=None, features=None):
+        if features is None:
+            features = {"organizations:ourlogs": True}
+        features.update(self.features)
+        with self.feature(features):
+            return self.client.get(self.url if url is None else url, data=data, format="json")
+
+    def test_count(self):
+        event_counts = [6, 0, 6, 3, 0, 3]
+        logs = []
+        for hour, count in enumerate(event_counts):
+            logs.extend(
+                [
+                    self.create_ourlog(
+                        {"body": "foo"},
+                        timestamp=self.start + timedelta(hours=hour, minutes=minute),
+                        attributes={"status": {"string_value": "success"}},
+                    )
+                    for minute in range(count)
+                ],
+            )
+        self.store_ourlogs(logs)
+
+        response = self._do_request(
+            data={
+                "start": self.start,
+                "end": self.end,
+                "interval": "1h",
+                "yAxis": "count()",
+                "project": self.project.id,
+                "dataset": "ourlogs",
+            },
+        )
+        assert response.status_code == 200, response.content
+        assert [attrs for time, attrs in response.data["data"]] == [
+            [{"count": count}] for count in event_counts
+        ]
+
+    def test_zerofill(self):
+        response = self._do_request(
+            data={
+                "start": self.start,
+                "end": self.end,
+                "interval": "1h",
+                "yAxis": "count()",
+                "project": self.project.id,
+                "dataset": "ourlogs",
+            },
+        )
+        assert response.status_code == 200, response.content
+        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 0}]] * 7
+
+    def test_homepage_query(self):
+        """This query matches the one made on the logs homepage so that we can be sure everything is working at least
+        for the initial load"""
+        response = self._do_request(
+            data={
+                "dataset": "ourlogs",
+                "excludeOther": 0,
+                "field": ["count(message)"],
+                "interval": "1h",
+                "orderby": "-count_message",
+                "partial": 1,
+                "per_page": 50,
+                "project": self.project.id,
+                "query": f"tags[sentry.timestamp_precise,number]:<={self.start.timestamp() * 1000000}",
+                "referrer": "explore.ourlogs.main-chart",
+                "sort": "-count_message",
+                "statsPeriod": "14d",
+                "yAxis": "count(message)",
+            },
+        )
+        assert response.status_code == 200, response.content
+        assert [attrs for time, attrs in response.data["data"]] == [[{"count": 0}]] * 338