test(integration): add integration tests for logs (#8619)

Vibhu Pandey 2025-07-29 14:44:16 +05:30 committed by GitHub
parent fa936a7e0d
commit c17241272f
26 changed files with 2611 additions and 785 deletions

View File

@@ -15,6 +15,8 @@ jobs:
matrix:
src:
- bootstrap
- auth
- querier
sqlstore-provider:
- postgres
- sqlite

View File

@@ -1,3 +1,10 @@
FROM node:18-bullseye AS build
WORKDIR /opt/
COPY ./frontend/ ./
RUN CI=1 yarn install
RUN CI=1 yarn build
FROM golang:1.23-bullseye
ARG OS="linux"
@@ -32,6 +39,8 @@ COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz
COPY --from=build /opt/build ./web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["/root/signoz", "server"]
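
For context, the signoz fixture added later in this commit builds this Dockerfile through testcontainers. A minimal sketch of that invocation follows; the ZEUSURL value is illustrative and normally points at the wiremock-backed zeus container on the test network.

from testcontainers.core.image import DockerImage

# Build the integration image; TARGETARCH and ZEUSURL are the build args this
# Dockerfile consumes, and the node stage's output is copied into ./web of the
# final image.
image = DockerImage(
    path="../../",
    dockerfile_path="cmd/enterprise/Dockerfile.integration",
    tag="signoz:integration",
)
image.build(buildargs={"TARGETARCH": "amd64", "ZEUSURL": "http://zeus:8080"})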

View File

@@ -12,15 +12,22 @@ pytest_plugins = [
"fixtures.sqlite",
"fixtures.zookeeper",
"fixtures.signoz",
"fixtures.logs",
]
def pytest_addoption(parser: pytest.Parser):
parser.addoption(
"--dev",
action="store",
"--reuse",
action="store_true",
default=False,
help="Run in dev mode. In this mode, the containers are not torn down after test run and are reused in subsequent test runs.",
help="Reuse environment. Use pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/setup::test_setup to setup your local dev environment for writing tests.",
)
parser.addoption(
"--teardown",
action="store_true",
default=False,
help="Teardown environment. Run pytest --basetemp=./tmp/ -vv --teardown src/bootstrap/setup::test_teardown to teardown your local dev environment.",
)
parser.addoption(
"--sqlstore-provider",
@@ -49,6 +56,6 @@ def pytest_addoption(parser: pytest.Parser):
parser.addoption(
"--schema-migrator-version",
action="store",
default="v0.111.38",
default="v0.128.2",
help="schema migrator version",
)
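
A minimal sketch (not part of the diff) of how a fixture can consume the new flags through pytest's standard Config.getoption API; the dev module introduced in this commit wraps exactly this pattern. The fixture and resource names here are illustrative.

import pytest

@pytest.fixture(name="example_resource", scope="package")
def example_resource(request: pytest.FixtureRequest) -> dict:
    # --teardown runs clean up only; nothing new should be created.
    if request.config.getoption("--teardown"):
        return {}
    resource = {"created": True}  # stands in for a real container
    # --reuse keeps the resource alive for the next run instead of finalizing it.
    if not request.config.getoption("--reuse"):
        request.addfinalizer(resource.clear)
    return resource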

View File

@@ -1,36 +1,59 @@
from http import HTTPStatus
from typing import Callable
import pytest
import requests
from fixtures import types
from fixtures import dev, types
USER_ADMIN_NAME = "admin"
USER_ADMIN_EMAIL = "admin@integration.test"
USER_ADMIN_PASSWORD = "password"
@pytest.fixture(name="create_first_user", scope="function")
def create_first_user(signoz: types.SigNoz) -> None:
def _create_user(name: str, email: str, password: str) -> None:
@pytest.fixture(name="create_user_admin", scope="package")
def create_user_admin(
signoz: types.SigNoz, request: pytest.FixtureRequest, pytestconfig: pytest.Config
) -> types.Operation:
def create() -> None:
response = requests.post(
signoz.self.host_config.get("/api/v1/register"),
signoz.self.host_configs["8080"].get("/api/v1/register"),
json={
"name": name,
"name": USER_ADMIN_NAME,
"orgId": "",
"orgName": "",
"email": email,
"password": password,
"email": USER_ADMIN_EMAIL,
"password": USER_ADMIN_PASSWORD,
},
timeout=5,
)
assert response.status_code == HTTPStatus.OK
return _create_user
return types.Operation(name="create_user_admin")
def delete(_: types.Operation) -> None:
pass
def restore(cache: dict) -> types.Operation:
return types.Operation(name=cache["name"])
return dev.wrap(
request,
pytestconfig,
"create_user_admin",
lambda: types.Operation(name=""),
create,
delete,
restore,
)
@pytest.fixture(name="get_jwt_token", scope="module")
def get_jwt_token(signoz: types.SigNoz) -> str:
def get_jwt_token(signoz: types.SigNoz) -> Callable[[str, str], str]:
def _get_jwt_token(email: str, password: str) -> str:
response = requests.post(
signoz.self.host_config.get("/api/v1/login"),
signoz.self.host_configs["8080"].get("/api/v1/login"),
json={
"email": email,
"password": password,

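For illustration, a hypothetical test combining the two fixtures above: create_user_admin registers the admin once per package, and get_jwt_token returns a callable that logs in and hands back the JWT string.

from fixtures import types

def test_admin_can_login(create_user_admin: types.Operation, get_jwt_token) -> None:
    # Credentials match the USER_ADMIN_* constants defined in this module;
    # get_jwt_token is Callable[[str, str], str] and returns the JWT.
    token = get_jwt_token("admin@integration.test", "password")
    assert token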
View File

@@ -1,13 +1,19 @@
import dataclasses
import os
from typing import Any, Generator
import clickhouse_driver
import clickhouse_connect
import clickhouse_connect.driver
import clickhouse_connect.driver.client
import docker
import docker.errors
import pytest
from testcontainers.clickhouse import ClickHouseContainer
from testcontainers.core.container import Network
from fixtures import types
from fixtures import dev, types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="clickhouse", scope="package")
@@ -21,144 +27,153 @@ def clickhouse(
"""
Package-scoped fixture for Clickhouse TestContainer.
"""
dev = request.config.getoption("--dev")
if dev:
container = pytestconfig.cache.get("clickhouse.container", None)
env = pytestconfig.cache.get("clickhouse.env", None)
if container and env:
assert isinstance(container, dict)
assert isinstance(env, dict)
def create() -> types.TestContainerClickhouse:
version = request.config.getoption("--clickhouse-version")
test_container = types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
container["host_config"]["scheme"],
container["host_config"]["address"],
container["host_config"]["port"],
),
container_config=types.TestContainerUrlConfig(
container["container_config"]["scheme"],
container["container_config"]["address"],
container["container_config"]["port"],
),
)
connection = clickhouse_driver.connect(
user=env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_USERNAME"],
password=env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_PASSWORD"],
host=test_container.host_config.address,
port=test_container.host_config.port,
)
return types.TestContainerClickhouse(
container=test_container,
conn=connection,
env=env,
)
version = request.config.getoption("--clickhouse-version")
container = ClickHouseContainer(
image=f"clickhouse/clickhouse-server:{version}",
port=9000,
username="signoz",
password="password",
)
cluster_config = f"""
<clickhouse>
<logger>
<level>information</level>
<formatting>
<type>json</type>
</formatting>
<log>/var/log/clickhouse-server/clickhouse-server.log</log>
<errorlog>/var/log/clickhouse-server/clickhouse-server.err.log</errorlog>
<size>1000M</size>
<count>3</count>
<console>1</console>
</logger>
<macros>
<shard>01</shard>
<replica>01</replica>
</macros>
<zookeeper>
<node>
<host>{zookeeper.container_config.address}</host>
<port>{zookeeper.container_config.port}</port>
</node>
</zookeeper>
<remote_servers>
<cluster>
<shard>
<replica>
<host>127.0.0.1</host>
<port>9000</port>
</replica>
</shard>
</cluster>
</remote_servers>
<distributed_ddl>
<path>/clickhouse/task_queue/ddl</path>
<profile>default</profile>
</distributed_ddl>
</clickhouse>
"""
tmp_dir = tmpfs("clickhouse")
cluster_config_file_path = os.path.join(tmp_dir, "cluster.xml")
with open(cluster_config_file_path, "w", encoding="utf-8") as f:
f.write(cluster_config)
container.with_volume_mapping(
cluster_config_file_path, "/etc/clickhouse-server/config.d/cluster.xml"
)
container.with_network(network)
container.start()
connection = clickhouse_driver.connect(
user=container.username,
password=container.password,
host=container.get_container_host_ip(),
port=container.get_exposed_port(9000),
)
def stop():
if dev:
return
connection.close()
container.stop(delete_volume=True)
request.addfinalizer(stop)
cached_clickhouse = types.TestContainerClickhouse(
container=types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
"tcp",
container.get_container_host_ip(),
container.get_exposed_port(9000),
),
container_config=types.TestContainerUrlConfig(
"tcp", container.get_wrapped_container().name, 9000
),
),
conn=connection,
env={
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN": f"tcp://{container.username}:{container.password}@{container.get_wrapped_container().name}:{9000}", # pylint: disable=line-too-long
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_USERNAME": container.username,
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_PASSWORD": container.password,
},
)
if dev:
pytestconfig.cache.set(
"clickhouse.container", dataclasses.asdict(cached_clickhouse.container)
container = ClickHouseContainer(
image=f"clickhouse/clickhouse-server:{version}",
port=9000,
username="signoz",
password="password",
)
pytestconfig.cache.set("clickhouse.env", cached_clickhouse.env)
return cached_clickhouse
cluster_config = f"""
<clickhouse>
<logger>
<level>information</level>
<formatting>
<type>json</type>
</formatting>
<log>/var/log/clickhouse-server/clickhouse-server.log</log>
<errorlog>/var/log/clickhouse-server/clickhouse-server.err.log</errorlog>
<size>1000M</size>
<count>3</count>
<console>1</console>
</logger>
<macros>
<shard>01</shard>
<replica>01</replica>
</macros>
<zookeeper>
<node>
<host>{zookeeper.container_configs["2181"].address}</host>
<port>{zookeeper.container_configs["2181"].port}</port>
</node>
</zookeeper>
<remote_servers>
<cluster>
<shard>
<replica>
<host>127.0.0.1</host>
<port>9000</port>
</replica>
</shard>
</cluster>
</remote_servers>
<distributed_ddl>
<path>/clickhouse/task_queue/ddl</path>
<profile>default</profile>
</distributed_ddl>
</clickhouse>
"""
tmp_dir = tmpfs("clickhouse")
cluster_config_file_path = os.path.join(tmp_dir, "cluster.xml")
with open(cluster_config_file_path, "w", encoding="utf-8") as f:
f.write(cluster_config)
container.with_volume_mapping(
cluster_config_file_path, "/etc/clickhouse-server/config.d/cluster.xml"
)
container.with_network(network)
container.start()
connection = clickhouse_connect.get_client(
user=container.username,
password=container.password,
host=container.get_container_host_ip(),
port=container.get_exposed_port(8123),
)
return types.TestContainerClickhouse(
container=types.TestContainerDocker(
id=container.get_wrapped_container().id,
host_configs={
"9000": types.TestContainerUrlConfig(
"tcp",
container.get_container_host_ip(),
container.get_exposed_port(9000),
),
"8123": types.TestContainerUrlConfig(
"tcp",
container.get_container_host_ip(),
container.get_exposed_port(8123),
),
},
container_configs={
"9000": types.TestContainerUrlConfig(
"tcp", container.get_wrapped_container().name, 9000
),
"8123": types.TestContainerUrlConfig(
"tcp", container.get_wrapped_container().name, 8123
),
},
),
conn=connection,
env={
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN": f"tcp://{container.username}:{container.password}@{container.get_wrapped_container().name}:{9000}",
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_USERNAME": container.username,
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_PASSWORD": container.password,
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER": "cluster",
},
)
def delete(container: types.TestContainerClickhouse) -> None:
client = docker.from_env()
try:
client.containers.get(container_id=container.container.id).stop()
client.containers.get(container_id=container.container.id).remove(v=True)
except docker.errors.NotFound:
logger.info(
"Skipping removal of Clickhouse, Clickhouse(%s) not found. Maybe it was manually removed?",
{"id": container.id},
)
def restore(cache: dict) -> types.TestContainerClickhouse:
container = types.TestContainerDocker.from_cache(cache["container"])
host_config = container.host_configs["8123"]
env = cache["env"]
conn = clickhouse_connect.get_client(
user=env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_USERNAME"],
password=env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_PASSWORD"],
host=host_config.address,
port=host_config.port,
)
return types.TestContainerClickhouse(
container=container,
conn=conn,
env=env,
)
return dev.wrap(
request,
pytestconfig,
"clickhouse",
empty=lambda: types.TestContainerSQL(
container=types.TestContainerDocker(
id="", host_configs={}, container_configs={}
),
conn=None,
env={},
),
create=create,
delete=delete,
restore=restore,
)
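
As a usage illustration (hypothetical test, not in the diff), the returned clickhouse_connect client can be queried directly; conn.query and conn.insert are the same calls the logs fixture below relies on.

from fixtures import types

def test_clickhouse_is_reachable(clickhouse: types.TestContainerClickhouse) -> None:
    # clickhouse.conn is a clickhouse_connect client bound to the HTTP port (8123).
    result = clickhouse.conn.query("SELECT 1")
    assert result.result_rows == [(1,)]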

View File

@@ -0,0 +1,102 @@
from typing import Callable, TypeVar
import pytest
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
T = TypeVar("T")
def reuse(request: pytest.FixtureRequest) -> bool:
return request.config.getoption("--reuse")
def teardown(request: pytest.FixtureRequest) -> bool:
return request.config.getoption("--teardown")
def get_cached_resource(pytestconfig: pytest.Config, key: str):
"""Get a resource from pytest cache by key."""
return pytestconfig.cache.get(key, None)
def set_cached_resource(pytestconfig: pytest.Config, key: str, value):
"""Set a resource in pytest cache by key."""
pytestconfig.cache.set(key, value)
def remove_cached_resource(pytestconfig: pytest.Config, key: str):
"""Remove a resource from pytest cache by key (set to None)."""
pytestconfig.cache.set(key, None)
def wrap( # pylint: disable=too-many-arguments,too-many-positional-arguments
request: pytest.FixtureRequest,
pytestconfig: pytest.Config,
key: str,
empty: Callable[[], T],
create: Callable[[], T],
delete: Callable[[T], None],
restore: Callable[[dict], T],
) -> T:
"""
Wraps a resource creation and cleanup process with reuse and teardown options.
- request: pytest.FixtureRequest
- pytestconfig: pytest.Config
- key: cache key for the resource
- empty: function to create an empty resource
- create: function to create the resource
- delete: function to delete the resource
- restore: function to restore resource from cache
"""
resource = empty()
if reuse(request):
existing_resource = pytestconfig.cache.get(key, None)
if existing_resource:
assert isinstance(existing_resource, dict)
logger.info("Reusing existing %s(%s)", key, existing_resource)
return restore(existing_resource)
if not teardown(request):
resource = create()
def finalizer():
nonlocal resource
if reuse(request):
logger.info(
"Skipping removal of %s",
resource.__log__() if hasattr(resource, "__log__") else resource,
)
return
if teardown(request):
existing_resource = pytestconfig.cache.get(key, None)
if not existing_resource:
logger.info(
"Skipping removal of %s, no existing %s found. Maybe you ran teardown without reuse?",
key,
key,
)
return
resource = restore(existing_resource)
logger.info(
"Removing %s",
resource.__log__() if hasattr(resource, "__log__") else resource,
)
delete(resource)
pytestconfig.cache.set(key, None)
request.addfinalizer(finalizer)
if reuse(request):
pytestconfig.cache.set(
key, resource.__cache__() if hasattr(resource, "__cache__") else resource
)
return resource
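
A condensed sketch of how a fixture plugs into wrap; the zeus, postgres, clickhouse and signoz fixtures in this commit all follow this shape. The fixture name and cache key "example" are illustrative.

import pytest

from fixtures import dev, types

@pytest.fixture(name="example", scope="package")
def example(
    request: pytest.FixtureRequest, pytestconfig: pytest.Config
) -> types.TestContainerDocker:
    def create() -> types.TestContainerDocker:
        # start the real container here and describe its ports
        return types.TestContainerDocker(
            id="example", host_configs={}, container_configs={}
        )

    def delete(container: types.TestContainerDocker) -> None:
        # stop and remove the container identified by container.id
        pass

    def restore(cache: dict) -> types.TestContainerDocker:
        return types.TestContainerDocker.from_cache(cache)

    return dev.wrap(
        request,
        pytestconfig,
        "example",
        lambda: types.TestContainerDocker(id="", host_configs={}, container_configs={}),
        create,
        delete,
        restore,
    )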

View File

@@ -0,0 +1,54 @@
from abc import ABC
from typing import Any
class LogsOrTracesFingerprint(ABC):
attributes: dict[str, Any]
def __init__(self, attributes: dict[str, Any]) -> None:
self.attributes = attributes
def calculate(self) -> str:
parts = []
for attr in self.attributes:
parts.append(f"{attr}={self.attributes[attr]}")
fhash = LogsOrTracesFingerprint.hash(self.attributes)
parts.append(f"hash={fhash}")
return ";".join(parts)
@staticmethod
def hash(attributes: dict[str, Any]) -> int:
# FNV-1a constants
offset64 = 14695981039346656037
prime64 = 1099511628211
separator_byte = 255
if not attributes:
return offset64
def hash_add(h: int, s: str) -> int:
"""Add a string to a fnv64a hash value, returning the updated hash."""
for char in s:
h ^= ord(char)
h = (h * prime64) & 0xFFFFFFFFFFFFFFFF # Keep 64-bit
return h
def hash_add_byte(h: int, b: int) -> int:
"""Add a byte to a fnv64a hash value, returning the updated hash."""
h ^= b
h = (h * prime64) & 0xFFFFFFFFFFFFFFFF # Keep 64-bit
return h
# Sort keys to ensure consistent hash
keys = sorted(attributes.keys())
sum_hash = offset64
for key in keys:
sum_hash = hash_add(sum_hash, key)
sum_hash = hash_add_byte(sum_hash, separator_byte)
sum_hash = hash_add(sum_hash, str(attributes[key]))
sum_hash = hash_add_byte(sum_hash, separator_byte)
return sum_hash
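
A short usage illustration (not part of the diff): calculate() joins the attributes as key=value pairs in insertion order and appends the FNV-1a hash computed over the sorted keys and values, mirroring the resource fingerprint written by the Go exporter.

from fixtures.fingerprint import LogsOrTracesFingerprint

fingerprint = LogsOrTracesFingerprint(
    {"service.name": "api", "deployment.environment": "staging"}
).calculate()
# -> "service.name=api;deployment.environment=staging;hash=<64-bit fnv1a value>"
print(fingerprint)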

View File

@@ -1,6 +1,7 @@
import dataclasses
from typing import List
import docker
import docker.errors
import pytest
from testcontainers.core.container import Network
from wiremock.client import (
@@ -11,7 +12,10 @@ from wiremock.client import (
from wiremock.constants import Config
from wiremock.testing.testcontainer import WireMockContainer
from fixtures import types
from fixtures import dev, types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="zeus", scope="package")
@@ -23,55 +27,57 @@ def zeus(
"""
Package-scoped fixture for running zeus
"""
dev = request.config.getoption("--dev")
if dev:
cached_zeus = pytestconfig.cache.get("zeus", None)
if cached_zeus:
return types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
cached_zeus["host_config"]["scheme"],
cached_zeus["host_config"]["address"],
cached_zeus["host_config"]["port"],
),
container_config=types.TestContainerUrlConfig(
cached_zeus["container_config"]["scheme"],
cached_zeus["container_config"]["address"],
cached_zeus["container_config"]["port"],
),
def create() -> types.TestContainerDocker:
container = WireMockContainer(image="wiremock/wiremock:2.35.1-1", secure=False)
container.with_network(network)
container.start()
return types.TestContainerDocker(
id=container.get_wrapped_container().id,
host_configs={
"8080": types.TestContainerUrlConfig(
"http",
container.get_container_host_ip(),
container.get_exposed_port(8080),
)
},
container_configs={
"8080": types.TestContainerUrlConfig(
"http", container.get_wrapped_container().name, 8080
)
},
)
def delete(container: types.TestContainerDocker):
client = docker.from_env()
try:
client.containers.get(container_id=container.id).stop()
client.containers.get(container_id=container.id).remove(v=True)
except docker.errors.NotFound:
logger.info(
"Skipping removal of Zeus, Zeus(%s) not found. Maybe it was manually removed?",
{"id": container.id},
)
container = WireMockContainer(image="wiremock/wiremock:2.35.1-1", secure=False)
container.with_network(network)
def restore(cache: dict) -> types.TestContainerDocker:
return types.TestContainerDocker.from_cache(cache)
container.start()
def stop():
if dev:
return
container.stop(delete_volume=True)
request.addfinalizer(stop)
cached_zeus = types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
"http", container.get_container_host_ip(), container.get_exposed_port(8080)
),
container_config=types.TestContainerUrlConfig(
"http", container.get_wrapped_container().name, 8080
),
return dev.wrap(
request,
pytestconfig,
"zeus",
lambda: types.TestContainerDocker(id="", host_configs={}, container_configs={}),
create,
delete,
restore,
)
if dev:
pytestconfig.cache.set("zeus", dataclasses.asdict(cached_zeus))
return cached_zeus
@pytest.fixture(name="make_http_mocks", scope="function")
def make_http_mocks():
def _make_http_mocks(container: types.TestContainerDocker, mappings: List[Mapping]):
Config.base_url = container.host_config.get("/__admin")
Config.base_url = container.host_configs["8080"].get("/__admin")
for mapping in mappings:
Mappings.create_mapping(mapping=mapping)
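
For illustration, a hypothetical test stubbing a zeus endpoint through make_http_mocks. The Mapping/MappingRequest/MappingResponse/HttpMethods models are assumed to come from the wiremock client package this module already imports; the stubbed URL is illustrative.

from wiremock.client import HttpMethods, Mapping, MappingRequest, MappingResponse

from fixtures import types

def test_stubbed_zeus_endpoint(signoz: types.SigNoz, make_http_mocks) -> None:
    # The mapping is registered against the wiremock admin API of the zeus
    # container; any request matching it receives the canned response.
    make_http_mocks(
        signoz.zeus,
        [
            Mapping(
                request=MappingRequest(method=HttpMethods.GET, url="/api/v1/example"),
                response=MappingResponse(status=200, body="{}"),
            )
        ],
    )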

View File

@@ -0,0 +1,402 @@
import datetime
import json
from abc import ABC
from typing import Any, Callable, Generator, List
import numpy as np
import pytest
from ksuid import KsuidMs
from fixtures import types
from fixtures.fingerprint import LogsOrTracesFingerprint
class LogsResource(ABC):
labels: str
fingerprint: str
seen_at_ts_bucket_start: np.int64
def __init__(
self,
labels: dict[str, str],
fingerprint: str,
seen_at_ts_bucket_start: np.int64,
) -> None:
self.labels = json.dumps(labels)
self.fingerprint = fingerprint
self.seen_at_ts_bucket_start = seen_at_ts_bucket_start
def np_arr(self) -> np.array:
return np.array([self.labels, self.fingerprint, self.seen_at_ts_bucket_start])
class LogsResourceOrAttributeKeys(ABC):
name: str
datatype: str
def __init__(self, name: str, datatype: str) -> None:
self.name = name
self.datatype = datatype
def np_arr(self) -> np.array:
return np.array([self.name, self.datatype])
class LogsTagAttributes(ABC):
unix_milli: np.int64
tag_key: str
tag_type: str
tag_data_type: str
string_value: str
number_value: np.float64
def __init__(
self,
timestamp: datetime.datetime,
tag_key: str,
tag_type: str,
tag_data_type: str,
string_value: str,
number_value: np.float64,
) -> None:
self.unix_milli = np.int64(int(timestamp.timestamp() * 1e3))
self.tag_key = tag_key
self.tag_type = tag_type
self.tag_data_type = tag_data_type
self.string_value = string_value or ""
self.number_value = number_value
def np_arr(self) -> np.array:
return np.array(
[
self.unix_milli,
self.tag_key,
self.tag_type,
self.tag_data_type,
self.string_value,
self.number_value,
]
)
class Logs(ABC):
ts_bucket_start: np.uint64
resource_fingerprint: str
timestamp: np.uint64
observed_timestamp: np.uint64
id: str
trace_id: str
span_id: str
trace_flags: np.uint32
severity_text: str
severity_number: np.uint8
body: str
attributes_string: dict[str, str]
attributes_number: dict[str, np.float64]
attributes_bool: dict[str, bool]
resources_string: dict[str, str]
scope_name: str
scope_version: str
scope_string: dict[str, str]
resource: List[LogsResource]
tag_attributes: List[LogsTagAttributes]
resource_keys: List[LogsResourceOrAttributeKeys]
attribute_keys: List[LogsResourceOrAttributeKeys]
def __init__(
self,
timestamp: datetime.datetime = datetime.datetime.now(),
resources: dict[str, Any] = {},
attributes: dict[str, Any] = {},
body: str = "default body",
severity_text: str = "INFO",
trace_id: str = "",
span_id: str = "",
trace_flags: np.uint32 = 0,
scope_name: str = "",
scope_version: str = "",
scope_attributes: dict[str, str] = {},
) -> None:
self.tag_attributes = []
self.attribute_keys = []
self.resource_keys = []
# Convert timestamp to uint64 nanoseconds
self.timestamp = np.uint64(int(timestamp.timestamp() * 1e9))
self.observed_timestamp = self.timestamp
# Calculate ts_bucket_start (30mins bucket)
# Round down to nearest 30-minute interval
minute = timestamp.minute
if minute < 30:
bucket_minute = 0
else:
bucket_minute = 30
bucket_start = timestamp.replace(minute=bucket_minute, second=0, microsecond=0)
self.ts_bucket_start = np.uint64(int(bucket_start.timestamp()))
# Generate ksuid by using the timestamp
self.id = str(KsuidMs(datetime=timestamp))
# Initialize trace fields
self.trace_id = trace_id
self.span_id = span_id
self.trace_flags = trace_flags
# Set severity fields
self.severity_text = severity_text
self.severity_number = self._get_severity_number(severity_text)
# Set body
self.body = body
# Process resources and attributes
self.resources_string = {k: str(v) for k, v in resources.items()}
for k, v in self.resources_string.items():
self.tag_attributes.append(
LogsTagAttributes(
timestamp=timestamp,
tag_key=k,
tag_type="resource",
tag_data_type="string",
string_value=v,
number_value=None,
)
)
self.resource_keys.append(
LogsResourceOrAttributeKeys(name=k, datatype="string")
)
# Calculate resource fingerprint
self.resource_fingerprint = LogsOrTracesFingerprint(
self.resources_string
).calculate()
# Process attributes by type
self.attributes_string = {}
self.attributes_number = {}
self.attributes_bool = {}
for k, v in attributes.items():
if isinstance(v, bool):
self.attributes_bool[k] = v
self.tag_attributes.append(
LogsTagAttributes(
timestamp=timestamp,
tag_key=k,
tag_type="tag",
tag_data_type="bool",
string_value=None,
number_value=None,
)
)
self.attribute_keys.append(
LogsResourceOrAttributeKeys(name=k, datatype="bool")
)
elif isinstance(v, (int, float)):
self.attributes_number[k] = np.float64(v)
self.tag_attributes.append(
LogsTagAttributes(
timestamp=timestamp,
tag_key=k,
tag_type="tag",
tag_data_type="float64",
string_value=None,
number_value=np.float64(v),
)
)
self.attribute_keys.append(
LogsResourceOrAttributeKeys(name=k, datatype="float64")
)
else:
self.attributes_string[k] = str(v)
self.tag_attributes.append(
LogsTagAttributes(
timestamp=timestamp,
tag_key=k,
tag_type="tag",
tag_data_type="string",
string_value=str(v),
number_value=None,
)
)
self.attribute_keys.append(
LogsResourceOrAttributeKeys(name=k, datatype="string")
)
# Initialize scope fields
self.scope_name = scope_name
self.scope_version = scope_version
self.scope_string = {k: str(v) for k, v in scope_attributes.items()}
for k, v in self.scope_string.items():
self.tag_attributes.append(
LogsTagAttributes(
timestamp=timestamp,
tag_key=k,
tag_type="scope",
tag_data_type="string",
string_value=v,
number_value=None,
)
)
self.resource = []
self.resource.append(
LogsResource(
labels=self.resources_string,
fingerprint=self.resource_fingerprint,
seen_at_ts_bucket_start=self.ts_bucket_start,
)
)
# Log fields (severity)
self.tag_attributes.append(
LogsTagAttributes(
timestamp=timestamp,
tag_key="severity_text",
tag_type="logfield",
tag_data_type="string",
string_value=self.severity_text,
number_value=None,
)
)
self.attribute_keys.append(
LogsResourceOrAttributeKeys(name="severity_text", datatype="string")
)
self.tag_attributes.append(
LogsTagAttributes(
timestamp=timestamp,
tag_key="severity_number",
tag_type="logfield",
tag_data_type="float64",
string_value=None,
number_value=float(self.severity_number),
)
)
self.attribute_keys.append(
LogsResourceOrAttributeKeys(name="severity_number", datatype="float64")
)
def _get_severity_number(self, severity_text: str) -> np.uint8:
"""Convert severity text to numeric value"""
severity_map = {
"TRACE": 1,
"DEBUG": 5,
"INFO": 9,
"WARN": 13,
"ERROR": 17,
"FATAL": 21,
}
return np.uint8(severity_map.get(severity_text.upper(), 9)) # Default to INFO
def np_arr(self) -> np.array:
"""Return log data as numpy array for database insertion"""
return np.array(
[
self.ts_bucket_start,
self.resource_fingerprint,
self.timestamp,
self.observed_timestamp,
self.id,
self.trace_id,
self.span_id,
self.trace_flags,
self.severity_text,
self.severity_number,
self.body,
self.attributes_string,
self.attributes_number,
self.attributes_bool,
self.resources_string,
self.scope_name,
self.scope_version,
self.scope_string,
]
)
@pytest.fixture(name="insert_logs", scope="function")
def insert_logs(
clickhouse: types.TestContainerClickhouse,
) -> Generator[Callable[[List[Logs]], None], Any, None]:
def _insert_logs(logs: List[Logs]) -> None:
"""
Insert logs into ClickHouse tables following the same logic as the Go exporter.
This function handles insertion into multiple tables:
- distributed_logs_v2 (main logs table)
- distributed_logs_v2_resource (resource fingerprints)
- distributed_tag_attributes_v2 (tag attributes)
- distributed_logs_attribute_keys (attribute keys)
- distributed_logs_resource_keys (resource keys)
"""
resources: List[LogsResource] = []
for log in logs:
resources.extend(log.resource)
if len(resources) > 0:
clickhouse.conn.insert(
database="signoz_logs",
table="distributed_logs_v2_resource",
data=[resource.np_arr() for resource in resources],
)
tag_attributes: List[LogsTagAttributes] = []
for log in logs:
tag_attributes.extend(log.tag_attributes)
if len(tag_attributes) > 0:
clickhouse.conn.insert(
database="signoz_logs",
table="distributed_tag_attributes_v2",
data=[tag_attribute.np_arr() for tag_attribute in tag_attributes],
)
attribute_keys: List[LogsResourceOrAttributeKeys] = []
for log in logs:
attribute_keys.extend(log.attribute_keys)
if len(attribute_keys) > 0:
clickhouse.conn.insert(
database="signoz_logs",
table="distributed_logs_attribute_keys",
data=[attribute_key.np_arr() for attribute_key in attribute_keys],
)
resource_keys: List[LogsResourceOrAttributeKeys] = []
for log in logs:
resource_keys.extend(log.resource_keys)
if len(resource_keys) > 0:
clickhouse.conn.insert(
database="signoz_logs",
table="distributed_logs_resource_keys",
data=[resource_key.np_arr() for resource_key in resource_keys],
)
clickhouse.conn.insert(
database="signoz_logs",
table="distributed_logs_v2",
data=[log.np_arr() for log in logs],
)
yield _insert_logs
clickhouse.conn.query(
f"TRUNCATE TABLE signoz_logs.logs_v2 ON CLUSTER '{clickhouse.env['SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER']}' SYNC"
)
clickhouse.conn.query(
f"TRUNCATE TABLE signoz_logs.logs_v2_resource ON CLUSTER '{clickhouse.env['SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER']}' SYNC"
)
clickhouse.conn.query(
f"TRUNCATE TABLE signoz_logs.tag_attributes_v2 ON CLUSTER '{clickhouse.env['SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER']}' SYNC"
)
clickhouse.conn.query(
f"TRUNCATE TABLE signoz_logs.logs_attribute_keys ON CLUSTER '{clickhouse.env['SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER']}' SYNC"
)
clickhouse.conn.query(
f"TRUNCATE TABLE signoz_logs.logs_resource_keys ON CLUSTER '{clickhouse.env['SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER']}' SYNC"
)
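
A hypothetical test (not part of the diff) showing how the fixture is meant to be driven: Logs derives fingerprints, tag attributes and attribute keys from the resources/attributes passed in, and insert_logs writes all of the derived rows before the test queries them.

import datetime

from fixtures.logs import Logs

def test_inserts_logs(insert_logs) -> None:
    now = datetime.datetime.now()
    logs = [
        Logs(
            timestamp=now - datetime.timedelta(seconds=i),
            resources={"service.name": "api"},
            attributes={"http.status_code": 200, "retry": False},
            body=f"request handled ({i})",
            severity_text="INFO",
        )
        for i in range(3)
    ]
    insert_logs(logs)
    # ...query the SigNoz API or ClickHouse and assert on the inserted rows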

View File

@@ -2,7 +2,10 @@ import docker
import pytest
from testcontainers.core.container import Network
from fixtures import types
from fixtures import dev, types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="migrator", scope="package")
@@ -11,55 +14,65 @@ def migrator(
clickhouse: types.TestContainerClickhouse,
request: pytest.FixtureRequest,
pytestconfig: pytest.Config,
) -> None:
) -> types.Operation:
"""
Package-scoped fixture for running schema migrations.
"""
dev = request.config.getoption("--dev")
if dev:
cached_migrator = pytestconfig.cache.get("migrator", None)
if cached_migrator is not None and cached_migrator is True:
return None
version = request.config.getoption("--schema-migrator-version")
def create() -> None:
version = request.config.getoption("--schema-migrator-version")
client = docker.from_env()
client = docker.from_env()
container = client.containers.run(
image=f"signoz/signoz-schema-migrator:{version}",
command=f"sync --replication=true --cluster-name=cluster --up= --dsn={clickhouse.env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN"]}",
detach=True,
auto_remove=False,
network=network.id,
)
container = client.containers.run(
image=f"signoz/signoz-schema-migrator:{version}",
command=f"sync --replication=true --cluster-name=cluster --up= --dsn={clickhouse.env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN"]}", # pylint: disable=line-too-long
detach=True,
auto_remove=False,
network=network.id,
)
result = container.wait()
result = container.wait()
if result["StatusCode"] != 0:
logs = container.logs().decode(encoding="utf-8")
container.remove()
print(logs)
raise RuntimeError("failed to run migrations on clickhouse")
if result["StatusCode"] != 0:
logs = container.logs().decode(encoding="utf-8")
container.remove()
print(logs)
raise RuntimeError("failed to run migrations on clickhouse")
container.remove()
container = client.containers.run(
image=f"signoz/signoz-schema-migrator:{version}",
command=f"async --replication=true --cluster-name=cluster --up= --dsn={clickhouse.env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN"]}",
detach=True,
auto_remove=False,
network=network.id,
)
container = client.containers.run(
image=f"signoz/signoz-schema-migrator:{version}",
command=f"async --replication=true --cluster-name=cluster --up= --dsn={clickhouse.env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN"]}", # pylint: disable=line-too-long
detach=True,
auto_remove=False,
network=network.id,
)
result = container.wait()
result = container.wait()
if result["StatusCode"] != 0:
logs = container.logs().decode(encoding="utf-8")
container.remove()
print(logs)
raise RuntimeError("failed to run migrations on clickhouse")
if result["StatusCode"] != 0:
logs = container.logs().decode(encoding="utf-8")
container.remove()
print(logs)
raise RuntimeError("failed to run migrations on clickhouse")
container.remove()
return types.Operation(name="migrator")
if dev:
pytestconfig.cache.set("migrator", True)
def delete(_: types.Operation) -> None:
pass
def restore(cache: dict) -> types.Operation:
return types.Operation(name=cache["name"])
return dev.wrap(
request,
pytestconfig,
"migrator",
lambda: types.Operation(name=""),
create,
delete,
restore,
)

View File

@@ -1,45 +1,47 @@
import docker
import pytest
from testcontainers.core.container import Network
from fixtures import dev, types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="network", scope="package")
def network(request: pytest.FixtureRequest, pytestconfig: pytest.Config) -> Network:
def network(
request: pytest.FixtureRequest, pytestconfig: pytest.Config
) -> types.Network:
"""
Package-Scoped fixture for creating a network
"""
nw = Network()
dev = request.config.getoption("--dev")
if dev:
cached_network = pytestconfig.cache.get("network", None)
if cached_network:
logger.info("Using cached Network(%s)", cached_network)
nw.id = cached_network["id"]
nw.name = cached_network["name"]
return nw
def create() -> types.Network:
nw = types.Network()
nw.create()
return nw
nw.create()
def stop():
dev = request.config.getoption("--dev")
if dev:
def delete(nw: types.Network):
client = docker.from_env()
try:
client.networks.get(network_id=nw.id).remove()
except docker.errors.NotFound:
logger.info(
"Skipping removal of Network(%s)", {"name": nw.name, "id": nw.id}
"Skipping removal of Network, Network(%s) not found. Maybe it was manually removed?",
{"name": nw.name, "id": nw.id},
)
else:
logger.info("Removing Network(%s)", {"name": nw.name, "id": nw.id})
nw.remove()
request.addfinalizer(stop)
def restore(existing: dict) -> types.Network:
nw = types.Network()
nw.id = existing.get("id")
nw.name = existing.get("name")
return nw
cached_network = nw
if dev:
pytestconfig.cache.set(
"network", {"name": cached_network.name, "id": cached_network.id}
)
return cached_network
return dev.wrap(
request,
pytestconfig,
"network",
lambda: types.Network(), # pylint: disable=unnecessary-lambda
create,
delete,
restore,
)

View File

@@ -1,11 +1,14 @@
import dataclasses
import docker
import docker.errors
import psycopg2
import pytest
from testcontainers.core.container import Network
from testcontainers.postgres import PostgresContainer
from fixtures import types
from fixtures import dev, types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="postgres", scope="package")
@@ -15,95 +18,97 @@ def postgres(
"""
Package-scoped fixture for PostgreSQL TestContainer.
"""
dev = request.config.getoption("--dev")
if dev:
container = pytestconfig.cache.get("postgres.container", None)
env = pytestconfig.cache.get("postgres.env", None)
if container and env:
assert isinstance(container, dict)
assert isinstance(env, dict)
def create() -> types.TestContainerSQL:
version = request.config.getoption("--postgres-version")
test_container = types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
container["host_config"]["scheme"],
container["host_config"]["address"],
container["host_config"]["port"],
),
container_config=types.TestContainerUrlConfig(
container["container_config"]["scheme"],
container["container_config"]["address"],
container["container_config"]["port"],
),
)
return types.TestContainerSQL(
container=test_container,
conn=psycopg2.connect(
dbname=env["SIGNOZ_SQLSTORE_POSTGRES_DBNAME"],
user=env["SIGNOZ_SQLSTORE_POSTGRES_USER"],
password=env["SIGNOZ_SQLSTORE_POSTGRES_PASSWORD"],
host=test_container.host_config.address,
port=test_container.host_config.port,
),
env=env,
)
version = request.config.getoption("--postgres-version")
container = PostgresContainer(
image=f"postgres:{version}",
port=5432,
username="signoz",
password="password",
dbname="signoz",
driver="psycopg2",
network=network.id,
)
container.start()
connection = psycopg2.connect(
dbname=container.dbname,
user=container.username,
password=container.password,
host=container.get_container_host_ip(),
port=container.get_exposed_port(5432),
)
def stop():
if dev:
return
connection.close()
container.stop(delete_volume=True)
request.addfinalizer(stop)
cached_postgres = types.TestContainerSQL(
container=types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
"postgresql",
container.get_container_host_ip(),
container.get_exposed_port(5432),
),
container_config=types.TestContainerUrlConfig(
"postgresql", container.get_wrapped_container().name, 5432
),
),
conn=connection,
env={
"SIGNOZ_SQLSTORE_PROVIDER": "postgres",
"SIGNOZ_SQLSTORE_POSTGRES_DSN": f"postgresql://{container.username}:{container.password}@{container.get_wrapped_container().name}:{5432}/{container.dbname}", # pylint: disable=line-too-long
"SIGNOZ_SQLSTORE_POSTGRES_DBNAME": container.dbname,
"SIGNOZ_SQLSTORE_POSTGRES_USER": container.username,
"SIGNOZ_SQLSTORE_POSTGRES_PASSWORD": container.password,
},
)
if dev:
pytestconfig.cache.set(
"postgres.container", dataclasses.asdict(cached_postgres.container)
container = PostgresContainer(
image=f"postgres:{version}",
port=5432,
username="signoz",
password="password",
dbname="signoz",
driver="psycopg2",
network=network.id,
)
pytestconfig.cache.set("postgres.env", cached_postgres.env)
container.start()
return cached_postgres
connection = psycopg2.connect(
dbname=container.dbname,
user=container.username,
password=container.password,
host=container.get_container_host_ip(),
port=container.get_exposed_port(5432),
)
return types.TestContainerSQL(
container=types.TestContainerDocker(
id=container.get_wrapped_container().id,
host_configs={
"5432": types.TestContainerUrlConfig(
"postgresql",
container.get_container_host_ip(),
container.get_exposed_port(5432),
)
},
container_configs={
"5432": types.TestContainerUrlConfig(
"postgresql", container.get_wrapped_container().name, 5432
)
},
),
conn=connection,
env={
"SIGNOZ_SQLSTORE_PROVIDER": "postgres",
"SIGNOZ_SQLSTORE_POSTGRES_DSN": f"postgresql://{container.username}:{container.password}@{container.get_wrapped_container().name}:{5432}/{container.dbname}",
"SIGNOZ_SQLSTORE_POSTGRES_DBNAME": container.dbname,
"SIGNOZ_SQLSTORE_POSTGRES_USER": container.username,
"SIGNOZ_SQLSTORE_POSTGRES_PASSWORD": container.password,
},
)
def delete(container: types.TestContainerSQL):
client = docker.from_env()
try:
client.containers.get(container_id=container.container.id).stop()
client.containers.get(container_id=container.container.id).remove(v=True)
except docker.errors.NotFound:
logger.info(
"Skipping removal of Postgres, Postgres(%s) not found. Maybe it was manually removed?",
{"id": container.container.id},
)
def restore(cache: dict) -> types.TestContainerSQL:
container = types.TestContainerDocker.from_cache(cache["container"])
host_config = container.host_configs["5432"]
env = cache["env"]
connection = psycopg2.connect(
dbname=env["SIGNOZ_SQLSTORE_POSTGRES_DBNAME"],
user=env["SIGNOZ_SQLSTORE_POSTGRES_USER"],
password=env["SIGNOZ_SQLSTORE_POSTGRES_PASSWORD"],
host=host_config.address,
port=host_config.port,
)
return types.TestContainerSQL(
container=container,
conn=connection,
env=env,
)
return dev.wrap(
request,
pytestconfig,
"postgres",
lambda: types.TestContainerSQL(
container=types.TestContainerDocker(
id="", host_configs={}, container_configs={}
),
conn=None,
env={},
),
create,
delete,
restore,
)

View File

@@ -1,21 +1,22 @@
import dataclasses
import platform
import time
from http import HTTPStatus
import docker
import docker.errors
import pytest
import requests
from testcontainers.core.container import DockerContainer, Network
from testcontainers.core.image import DockerImage
from fixtures import types
from fixtures import dev, types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="signoz", scope="package")
def signoz(
def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
network: Network,
zeus: types.TestContainerDocker,
sqlstore: types.TestContainerSQL,
@@ -27,126 +28,135 @@ def signoz(
Package-scoped fixture for setting up SigNoz.
"""
dev = request.config.getoption("--dev")
if dev:
cached_signoz = pytestconfig.cache.get("signoz.container", None)
if cached_signoz:
self = types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
cached_signoz["host_config"]["scheme"],
cached_signoz["host_config"]["address"],
cached_signoz["host_config"]["port"],
),
container_config=types.TestContainerUrlConfig(
cached_signoz["container_config"]["scheme"],
cached_signoz["container_config"]["address"],
cached_signoz["container_config"]["port"],
),
)
return types.SigNoz(
self=self, sqlstore=sqlstore, telemetrystore=clickhouse, zeus=zeus
)
def create() -> types.SigNoz:
# Run the migrations for clickhouse
request.getfixturevalue("migrator")
# Run the migrations for clickhouse
request.getfixturevalue("migrator")
# Build the image
self = DockerImage(
path="../../",
dockerfile_path="cmd/enterprise/Dockerfile.integration",
tag="signoz:integration",
)
arch = platform.machine()
if arch == "x86_64":
arch = "amd64"
self.build(
buildargs={
"TARGETARCH": arch,
"ZEUSURL": zeus.container_config.base(),
}
)
env = (
{
"SIGNOZ_WEB_ENABLED": False,
"SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug",
"SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": False,
}
| sqlstore.env
| clickhouse.env
)
container = DockerContainer("signoz:integration")
for k, v in env.items():
container.with_env(k, v)
container.with_exposed_ports(8080)
container.with_network(network=network)
provider = request.config.getoption("--sqlstore-provider")
if provider == "sqlite":
container.with_volume_mapping(
sqlstore.env["SIGNOZ_SQLSTORE_SQLITE_PATH"],
sqlstore.env["SIGNOZ_SQLSTORE_SQLITE_PATH"],
"rw",
# Build the image
self = DockerImage(
path="../../",
dockerfile_path="cmd/enterprise/Dockerfile.integration",
tag="signoz:integration",
)
container.start()
arch = platform.machine()
if arch == "x86_64":
arch = "amd64"
def ready(container: DockerContainer) -> None:
for attempt in range(5):
try:
response = requests.get(
f"http://{container.get_container_host_ip()}:{container.get_exposed_port(8080)}/api/v1/health", # pylint: disable=line-too-long
timeout=2,
)
return response.status_code == HTTPStatus.OK
except Exception: # pylint: disable=broad-exception-caught
logger.info(
"Attempt %s at readiness check for SigNoz container %s failed, going to retry ...", # pylint: disable=line-too-long
attempt + 1,
container,
)
time.sleep(2)
raise TimeoutError("timeout exceeded while waiting")
self.build(
buildargs={
"TARGETARCH": arch,
"ZEUSURL": zeus.container_configs["8080"].base(),
}
)
try:
ready(container=container)
except Exception as e: # pylint: disable=broad-exception-caught
raise e
env = (
{
"SIGNOZ_WEB_ENABLED": True,
"SIGNOZ_WEB_DIRECTORY": "/root/web",
"SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug",
"SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": False,
}
| sqlstore.env
| clickhouse.env
)
def stop():
if dev:
logger.info("Skipping removal of SigNoz container %s ...", container)
return
else:
logger.info("Removing SigNoz container %s ...", container)
container.stop(delete_volume=True)
container = DockerContainer("signoz:integration")
for k, v in env.items():
container.with_env(k, v)
container.with_exposed_ports(8080)
container.with_network(network=network)
request.addfinalizer(stop)
provider = request.config.getoption("--sqlstore-provider")
if provider == "sqlite":
container.with_volume_mapping(
sqlstore.env["SIGNOZ_SQLSTORE_SQLITE_PATH"],
sqlstore.env["SIGNOZ_SQLSTORE_SQLITE_PATH"],
"rw",
)
cached_signoz = types.SigNoz(
self=types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
"http",
container.get_container_host_ip(),
container.get_exposed_port(8080),
container.start()
def ready(container: DockerContainer) -> None:
for attempt in range(10):
try:
response = requests.get(
f"http://{container.get_container_host_ip()}:{container.get_exposed_port(8080)}/api/v1/health",
timeout=2,
)
return response.status_code == HTTPStatus.OK
except Exception: # pylint: disable=broad-exception-caught
logger.info(
"Attempt %s at readiness check for SigNoz container %s failed, going to retry ...",
attempt + 1,
container,
)
time.sleep(2)
raise TimeoutError("timeout exceeded while waiting")
try:
ready(container=container)
except Exception as e: # pylint: disable=broad-exception-caught
raise e
return types.SigNoz(
self=types.TestContainerDocker(
id=container.get_wrapped_container().id,
host_configs={
"8080": types.TestContainerUrlConfig(
"http",
container.get_container_host_ip(),
container.get_exposed_port(8080),
)
},
container_configs={
"8080": types.TestContainerUrlConfig(
"http",
container.get_wrapped_container().name,
8080,
)
},
),
container_config=types.TestContainerUrlConfig(
"http",
container.get_wrapped_container().name,
8080,
sqlstore=sqlstore,
telemetrystore=clickhouse,
zeus=zeus,
)
def delete(container: types.SigNoz) -> None:
client = docker.from_env()
try:
client.containers.get(container_id=container.self.id).stop()
client.containers.get(container_id=container.self.id).remove(v=True)
except docker.errors.NotFound:
logger.info(
"Skipping removal of SigNoz, SigNoz(%s) not found. Maybe it was manually removed?",
{"id": container.self.id},
)
def restore(cache: dict) -> types.SigNoz:
self = types.TestContainerDocker.from_cache(cache)
return types.SigNoz(
self=self,
sqlstore=sqlstore,
telemetrystore=clickhouse,
zeus=zeus,
)
return dev.wrap(
request,
pytestconfig,
"signoz",
empty=lambda: types.SigNoz(
self=types.TestContainerDocker(
id="",
host_configs={},
container_configs={},
),
sqlstore=sqlstore,
telemetrystore=clickhouse,
zeus=zeus,
),
sqlstore=sqlstore,
telemetrystore=clickhouse,
zeus=zeus,
create=create,
delete=delete,
restore=restore,
)
if dev:
pytestconfig.cache.set(
"signoz.container", dataclasses.asdict(cached_signoz.self)
)
return cached_signoz
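
For illustration, a hypothetical test using the fixture: the SigNoz object exposes the host-side URL of port 8080 through host_configs, which is how the auth fixtures above reach /api/v1/register and /api/v1/login.

from http import HTTPStatus

import requests

from fixtures import types

def test_signoz_is_healthy(signoz: types.SigNoz) -> None:
    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/health"), timeout=5
    )
    assert response.status_code == HTTPStatus.OK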

View File

@@ -4,7 +4,7 @@ from typing import Any, Generator
import pytest
from fixtures import types
from fixtures import dev, types
ConnectionTuple = namedtuple("ConnectionTuple", "connection config")
@@ -19,51 +19,52 @@ def sqlite(
Package-scoped fixture for SQLite.
"""
dev = request.config.getoption("--dev")
if dev:
container = pytestconfig.cache.get("sqlite.container", None)
env = pytestconfig.cache.get("sqlite.env", None)
def create() -> types.TestContainerSQL:
tmpdir = tmpfs("sqlite")
path = tmpdir / "signoz.db"
connection = sqlite3.connect(path, check_same_thread=False)
if container and env:
assert isinstance(container, dict)
assert isinstance(env, dict)
return types.TestContainerSQL(
container=types.TestContainerDocker(
id="",
host_configs={},
container_configs={},
),
conn=connection,
env={
"SIGNOZ_SQLSTORE_PROVIDER": "sqlite",
"SIGNOZ_SQLSTORE_SQLITE_PATH": str(path),
},
)
return types.TestContainerSQL(
container=types.TestContainerDocker(
host_config=None,
container_config=None,
),
conn=sqlite3.connect(
env["SIGNOZ_SQLSTORE_SQLITE_PATH"], check_same_thread=False
),
env=env,
)
def delete(_: types.TestContainerSQL):
pass
tmpdir = tmpfs("sqlite")
path = tmpdir / "signoz.db"
connection = sqlite3.connect(path, check_same_thread=False)
def restore(cache: dict) -> types.TestContainerSQL:
path = cache["env"].get("SIGNOZ_SQLSTORE_SQLITE_PATH")
conn = sqlite3.connect(path, check_same_thread=False)
return types.TestContainerSQL(
container=types.TestContainerDocker(
id="",
host_configs={},
container_configs={},
),
conn=conn,
env=cache["env"],
)
def stop():
if dev:
return
connection.close()
request.addfinalizer(stop)
cached_sqlite = types.TestContainerSQL(
container=types.TestContainerDocker(
host_config=None,
container_config=None,
return dev.wrap(
request,
pytestconfig,
"sqlite",
lambda: types.TestContainerSQL(
container=types.TestContainerDocker(
id="", host_configs={}, container_configs={}
),
conn=None,
env={},
),
conn=connection,
env={
"SIGNOZ_SQLSTORE_PROVIDER": "sqlite",
"SIGNOZ_SQLSTORE_SQLITE_PATH": str(path),
},
create,
delete,
restore,
)
if dev:
pytestconfig.cache.set("sqlite.env", cached_sqlite.env)
return cached_sqlite

View File

@@ -2,8 +2,11 @@ from dataclasses import dataclass
from typing import Dict
from urllib.parse import urljoin
import clickhouse_connect
import clickhouse_connect.driver
import clickhouse_connect.driver.client
import py
from clickhouse_driver.dbapi import Connection
from testcontainers.core.container import Network
LegacyPath = py.path.local
@@ -21,12 +24,52 @@ class TestContainerUrlConfig:
def get(self, path: str) -> str:
return urljoin(self.base(), path)
def __cache__(self) -> dict:
return {
"scheme": self.scheme,
"address": self.address,
"port": self.port,
}
def __log__(self) -> str:
return f"TestContainerUrlConfig(scheme={self.scheme}, address={self.address}, port={self.port})"
@dataclass
class TestContainerDocker:
__test__ = False
host_config: TestContainerUrlConfig
container_config: TestContainerUrlConfig
id: str
host_configs: Dict[str, TestContainerUrlConfig]
container_configs: Dict[str, TestContainerUrlConfig]
@staticmethod
def from_cache(cache: dict) -> "TestContainerDocker":
return TestContainerDocker(
id=cache["id"],
host_configs={
port: TestContainerUrlConfig(**config)
for port, config in cache["host_configs"].items()
},
container_configs={
port: TestContainerUrlConfig(**config)
for port, config in cache["container_configs"].items()
},
)
def __cache__(self) -> dict:
return {
"id": self.id,
"host_configs": {
port: config.__cache__() for port, config in self.host_configs.items()
},
"container_configs": {
port: config.__cache__()
for port, config in self.container_configs.items()
},
}
def __log__(self) -> str:
return f"TestContainerDocker(id={self.id}, host_configs={', '.join(host_config.__log__() for host_config in self.host_configs.values())}, container_configs={', '.join(container_config.__log__() for container_config in self.container_configs.values())})"
@dataclass
@@ -36,14 +79,32 @@ class TestContainerSQL:
conn: any
env: Dict[str, str]
def __cache__(self) -> dict:
return {
"container": self.container.__cache__(),
"env": self.env,
}
def __log__(self) -> str:
return f"TestContainerSQL(container={self.container.__log__()}, env={self.env})"
@dataclass
class TestContainerClickhouse:
__test__ = False
container: TestContainerDocker
conn: Connection
conn: clickhouse_connect.driver.client.Client
env: Dict[str, str]
def __cache__(self) -> dict:
return {
"container": self.container.__cache__(),
"env": self.env,
}
def __log__(self) -> str:
return f"TestContainerClickhouse(container={self.container.__log__()}, env={self.env})"
@dataclass
class SigNoz:
@@ -52,3 +113,32 @@ class SigNoz:
sqlstore: TestContainerSQL
telemetrystore: TestContainerClickhouse
zeus: TestContainerDocker
def __cache__(self) -> dict:
return self.self.__cache__()
def __log__(self) -> str:
return f"SigNoz(self={self.self.__log__()}, sqlstore={self.sqlstore.__log__()}, telemetrystore={self.telemetrystore.__log__()}, zeus={self.zeus.__log__()})"
@dataclass
class Operation:
__test__ = False
name: str
def __cache__(self) -> dict:
return {"name": self.name}
def __log__(self) -> str:
return f"Operation(name={self.name})"
class Network(Network): # pylint: disable=function-redefined
def __cache__(self) -> dict:
return {
"id": self.id,
"name": self.name,
}
def __log__(self) -> str:
return f"Network(id={self.id}, name={self.name})"
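
A small illustration (not part of the diff) of the cache round trip these helpers enable: dev.wrap stores __cache__() in pytest's cache on a --reuse run and from_cache rebuilds the dataclass on the next one. The id, addresses and ports here are made up.

from fixtures.types import TestContainerDocker, TestContainerUrlConfig

original = TestContainerDocker(
    id="abc123",
    host_configs={"8080": TestContainerUrlConfig("http", "localhost", 55080)},
    container_configs={"8080": TestContainerUrlConfig("http", "signoz", 8080)},
)
cached = original.__cache__()  # plain dict, safe to store in .pytest_cache
restored = TestContainerDocker.from_cache(cached)
assert restored.host_configs["8080"].get("/api/v1/health") == "http://localhost:55080/api/v1/health"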

View File

@@ -1,9 +1,12 @@
import dataclasses
import docker
import docker.errors
import pytest
from testcontainers.core.container import DockerContainer, Network
from fixtures import types
from fixtures import dev, types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="zookeeper", scope="package")
@@ -14,54 +17,53 @@ def zookeeper(
Package-scoped fixture for Zookeeper TestContainer.
"""
dev = request.config.getoption("--dev")
if dev:
cached_zookeeper = pytestconfig.cache.get("zookeeper", None)
if cached_zookeeper:
return types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
cached_zookeeper["host_config"]["scheme"],
cached_zookeeper["host_config"]["address"],
cached_zookeeper["host_config"]["port"],
),
container_config=types.TestContainerUrlConfig(
cached_zookeeper["container_config"]["scheme"],
cached_zookeeper["container_config"]["address"],
cached_zookeeper["container_config"]["port"],
),
def create() -> types.TestContainerDocker:
version = request.config.getoption("--zookeeper-version")
container = DockerContainer(image=f"bitnami/zookeeper:{version}")
container.with_env("ALLOW_ANONYMOUS_LOGIN", "yes")
container.with_exposed_ports(2181)
container.with_network(network=network)
container.start()
return types.TestContainerDocker(
id=container.get_wrapped_container().id,
host_configs={
"2181": types.TestContainerUrlConfig(
scheme="tcp",
address=container.get_container_host_ip(),
port=container.get_exposed_port(2181),
)
},
container_configs={
"2181": types.TestContainerUrlConfig(
scheme="tcp",
address=container.get_wrapped_container().name,
port=2181,
)
},
)
def delete(container: types.TestContainerDocker):
client = docker.from_env()
try:
client.containers.get(container_id=container.id).stop()
client.containers.get(container_id=container.id).remove(v=True)
except docker.errors.NotFound:
logger.info(
"Skipping removal of Zookeeper, Zookeeper(%s) not found. Maybe it was manually removed?",
{"id": container.id},
)
version = request.config.getoption("--zookeeper-version")
def restore(cache: dict) -> types.TestContainerDocker:
return types.TestContainerDocker.from_cache(cache)
container = DockerContainer(image=f"bitnami/zookeeper:{version}")
container.with_env("ALLOW_ANONYMOUS_LOGIN", "yes")
container.with_exposed_ports(2181)
container.with_network(network=network)
container.start()
def stop():
if dev:
return
container.stop(delete_volume=True)
request.addfinalizer(stop)
cached_zookeeper = types.TestContainerDocker(
host_config=types.TestContainerUrlConfig(
"tcp",
container.get_container_host_ip(),
container.get_exposed_port(2181),
),
container_config=types.TestContainerUrlConfig(
"tcp",
container.get_wrapped_container().name,
2181,
),
return dev.wrap(
request,
pytestconfig,
"zookeeper",
lambda: types.TestContainerDocker(id="", host_configs={}, container_configs={}),
create,
delete,
restore,
)
if dev:
pytestconfig.cache.set("zookeeper", dataclasses.asdict(cached_zookeeper))
return cached_zookeeper

View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
[[package]]
name = "astroid"
@@ -33,7 +33,7 @@ version = "25.1.0"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.9"
groups = ["main"]
groups = ["dev"]
files = [
{file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"},
{file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"},
@@ -84,6 +84,87 @@ files = [
{file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
]
[[package]]
name = "cffi"
version = "1.17.1"
description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\""
files = [
{file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
{file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
{file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
{file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
{file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
{file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
{file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
{file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
{file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
{file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
{file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
{file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
{file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
{file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
{file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
{file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
{file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
{file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
{file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
{file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
{file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
{file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
{file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
{file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
]
[package.dependencies]
pycparser = "*"
[[package]]
name = "charset-normalizer"
version = "3.4.1"
@ -188,141 +269,115 @@ files = [
[[package]]
name = "click"
version = "8.1.8"
version = "8.2.1"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
groups = ["main"]
python-versions = ">=3.10"
groups = ["dev"]
files = [
{file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
{file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
{file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"},
{file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"},
]
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "clickhouse-driver"
version = "0.2.9"
description = "Python driver with native interface for ClickHouse"
name = "clickhouse-connect"
version = "0.8.18"
description = "ClickHouse Database Core Driver for Python, Pandas, and Superset"
optional = false
python-versions = "<4,>=3.7"
python-versions = "~=3.8"
groups = ["main"]
files = [
{file = "clickhouse-driver-0.2.9.tar.gz", hash = "sha256:050ea4870ead993910b39e7fae965dc1c347b2e8191dcd977cd4b385f9e19f87"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ce04e9d0d0f39561f312d1ac1a8147bc9206e4267e1a23e20e0423ebac95534"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7ae5c8931bf290b9d85582e7955b9aad7f19ff9954e48caa4f9a180ea4d01078"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e51792f3bd12c32cb15a907f12de3c9d264843f0bb33dce400e3966c9f09a3f"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42fc546c31e4a04c97b749769335a679c9044dc693fa7a93e38c97fd6727173d"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a383a403d185185c64e49edd6a19b2ec973c5adcb8ebff7ed2fc539a2cc65a5"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f05321a97e816afc75b3e4f9eda989848fecf14ecf1a91d0f22c04258123d1f7"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be47e793846aac28442b6b1c6554e0731b848a5a7759a54aa2489997354efe4a"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:780e42a215d1ae2f6d695d74dd6f087781fb2fa51c508b58f79e68c24c5364e0"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9e28f1fe850675e173db586e9f1ac790e8f7edd507a4227cd54cd7445f8e75b6"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:125aae7f1308d3083dadbb3c78f828ae492e060f13e4007a0cf53a8169ed7b39"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2f3c4fbb61e75c62a1ab93a1070d362de4cb5682f82833b2c12deccb3bae888d"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dc03196a84e32d23b88b665be69afae98f57426f5fdf203e16715b756757961"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-win32.whl", hash = "sha256:25695d78a1d7ad6e221e800612eac08559f6182bf6dee0a220d08de7b612d993"},
{file = "clickhouse_driver-0.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:367acac95398d721a0a2a6cf87e93638c5588b79498a9848676ce7f182540a6c"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a7353a7a08eee3aa0001d8a5d771cb1f37e2acae1b48178002431f23892121a"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6af1c6cbc3481205503ab72a34aa76d6519249c904aa3f7a84b31e7b435555be"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48033803abd1100bfff6b9a1769d831b672cd3cda5147e0323b956fd1416d38d"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f202a58a540c85e47c31dabc8f84b6fe79dca5315c866450a538d58d6fa0571"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4df50fd84bfa4aa1eb7b52d48136066bfb64fabb7ceb62d4c318b45a296200b"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433a650571a0d7766eb6f402e8f5930222997686c2ee01ded22f1d8fd46af9d4"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:232ee260475611cbf7adb554b81db6b5790b36e634fe2164f4ffcd2ca3e63a71"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:09049f7e71f15c9c9a03f597f77fc1f7b61ababd155c06c0d9e64d1453d945d7"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:424153d1d5f5a807f596a48cc88119f9fb3213ca7e38f57b8d15dcc964dd91f7"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4f078fd1cf19c4ca63b8d1e0803df665310c8d5b644c5b02bf2465e8d6ef8f55"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f138d939e26e767537f891170b69a55a88038919f5c10d8865b67b8777fe4848"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9aafabc7e32942f85dcb46f007f447ab69024831575df97cae28c6ed127654d1"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-win32.whl", hash = "sha256:935e16ebf1a1998d8493979d858821a755503c9b8af572d9c450173d4b88868c"},
{file = "clickhouse_driver-0.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:306b3102cba278b5dfec6f5f7dc8b78416c403901510475c74913345b56c9e42"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fcb2fd00e58650ae206a6d5dbc83117240e622471aa5124733fbf2805eb8bda0"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7a3e6b0a1eb218e3d870a94c76daaf65da46dca8f6888ea6542f94905c24d88"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d8e2888a857d8db3d98765a5ad23ab561241feaef68bbffc5a0bd9c142342"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85d50c011467f5ff6772c4059345968b854b72e07a0219030b7c3f68419eb7f7"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93b395c1370629ccce8fb3e14cd5be2646d227bd32018c21f753c543e9a7e96b"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dbcee870c60d9835e5dce1456ab6b9d807e6669246357f4b321ef747b90fa43"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fffa5a5f317b1ec92e406a30a008929054cf3164d2324a3c465d0a0330273bf8"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:476702740a279744badbd177ae1c4a2d089ec128bd676861219d1f92078e4530"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5cd6d95fab5ff80e9dc9baedc9a926f62f74072d42d5804388d63b63bec0bb63"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:05027d32d7cf3e46cb8d04f8c984745ae01bd1bc7b3579f9dadf9b3cca735697"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:3d11831842250b4c1b26503a6e9c511fc03db096608b7c6af743818c421a3032"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:81b4b671b785ebb0b8aeabf2432e47072413d81db959eb8cfd8b6ab58c5799c6"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-win32.whl", hash = "sha256:e893bd4e014877174a59e032b0e99809c95ec61328a0e6bd9352c74a2f6111a8"},
{file = "clickhouse_driver-0.2.9-cp312-cp312-win_amd64.whl", hash = "sha256:de6624e28eeffd01668803d28ae89e3d4e359b1bff8b60e4933e1cb3c6f86f18"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:909205324089a9ee59bee7ecbfa94595435118cca310fd62efdf13f225aa2965"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f31d6e47dc2b0f367f598f5629147ed056d7216c1788e25190fcfbfa02e749"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed84179914b2b7bb434c2322a6e7fd83daa681c97a050450511b66d917a129bb"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67d1bf63efb4ba14ae6c6da99622e4a549e68fc3ee14d859bf611d8e6a61b3fa"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eed23ea41dd582d76f7a2ec7e09cbe5e9fec008f11a4799fa35ce44a3ebd283"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a654291132766efa2703058317749d7c69b69f02d89bac75703eaf7f775e20da"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c26c5ef16d0ef3cabc5bc03e827e01b0a4afb5b4eaf8850b7cf740cee04a1d4"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b57e83d7986d3cbda6096974a9510eb53cb33ad9072288c87c820ba5eee3370e"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:153cc03b36f22cbde55aa6a5bbe99072a025567a54c48b262eb0da15d8cd7c83"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:83a857d99192936091f495826ae97497cd1873af213b1e069d56369fb182ab8e"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb05a9bb22cbe9ad187ad268f86adf7e60df6083331fe59c01571b7b725212dd"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-win32.whl", hash = "sha256:3e282c5c25e32d96ed151e5460d2bf4ecb805ea64449197dd918e84e768016df"},
{file = "clickhouse_driver-0.2.9-cp37-cp37m-win_amd64.whl", hash = "sha256:c46dccfb04a9afd61a1b0e60bfefceff917f76da2c863f9b36b39248496d5c77"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:612ca9028c718f362c97f552e63d313cf1a70a616ef8532ddb0effdaf12ebef9"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471b884d318e012f68d858476052742048918854f7dfe87d78e819f87a848ffb"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58ee63c35e99da887eb035c8d6d9e64fd298a0efc1460395297dd5cc281a6912"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0819bb63d2c5025a1fb9589f57ef82602687cef11081d6dfa6f2ce44606a1772"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6680ee18870bca1fbab1736c8203a965efaec119ab4c37821ad99add248ee08"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:713c498741b54debd3a10a5529e70b6ed85ca33c3e8629e24ae5cd8160b5a5f2"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:730837b8f63941065c9c955c44286aef0987fb084ffb3f55bf1e4fe07df62269"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9f4e38b2ea09214c8e7848a19391009a18c56a3640e1ba1a606b9e57aeb63404"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:457f1d6639e0345b717ae603c79bd087a35361ce68c1c308d154b80b841e5e7d"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:49a55aeb8ea625a87965a96e361bbb1ad67d0931bfb2a575f899c1064e70c2da"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9230058d8c9b1a04079afae4650fb67745f0f1c39db335728f64d48bd2c19246"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8798258bd556542dd9c6b8ebe62f9c5110c9dcdf97c57fb077e7b8b6d6da0826"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-win32.whl", hash = "sha256:ce8e3f4be46bcc63555863f70ab0035202b082b37e6f16876ef50e7bc4b47056"},
{file = "clickhouse_driver-0.2.9-cp38-cp38-win_amd64.whl", hash = "sha256:2d982959ff628255808d895a67493f2dab0c3a9bfc65eeda0f00c8ae9962a1b3"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a46b227fab4420566ed24ee70d90076226d16fcf09c6ad4d428717efcf536446"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7eaa2ce5ea08cf5fddebb8c274c450e102f329f9e6966b6cd85aa671c48e5552"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97f0083194d6e23b5ef6156ed0d5388c37847b298118199d7937ba26412a9e2"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6cab5cdbb0f8ee51d879d977b78f07068b585225ac656f3c081896c362e8f83"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdb1b011a53ee71539e9dc655f268b111bac484db300da92829ed59e910a8fd0"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf51bb761b281d20910b4b689c699ef98027845467daa5bb5dfdb53bd6ee404"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8ea462e3cebb121ff55002e9c8a9a0a3fd9b5bbbf688b4960f0a83c0172fb31"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:70bee21c245226ad0d637bf470472e2d487b86911b6d673a862127b934336ff4"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:253a3c223b944d691bf0abbd599f592ea3b36f0a71d2526833b1718f37eca5c2"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a6549b53fc5c403dc556cb39b2ae94d73f9b113daa00438a660bb1dd5380ae4d"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1c685cd4abe61af1c26279ff04b9f567eb4d6c1ec7fb265af7481b1f153043aa"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7e25144219577491929d032a6c3ddd63c6cd7fa764af829a5637f798190d9b26"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-win32.whl", hash = "sha256:0b9925610d25405a8e6d83ff4f54fc2456a121adb0155999972f5edd6ba3efc8"},
{file = "clickhouse_driver-0.2.9-cp39-cp39-win_amd64.whl", hash = "sha256:b243de483cfa02716053b0148d73558f4694f3c27b97fc1eaa97d7079563a14d"},
{file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45a3d5b1d06750fd6a18c29b871494a2635670099ec7693e756a5885a4a70dbf"},
{file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8415ffebd6ca9eef3024763abc450f8659f1716d015bd563c537d01c7fbc3569"},
{file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace48db993aa4bd31c42de0fa8d38c94ad47405916d6b61f7a7168a48fb52ac1"},
{file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b07123334fe143bfe6fa4e3d4b732d647d5fd2cfb9ec7f2f76104b46fe9d20c6"},
{file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2af3efa73d296420ce6362789f5b1febf75d4aa159a479393f01549115509d5"},
{file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:baf57eede88d07a1eb04352d26fc58a4d97991ca3d8840f7c5d48691dec9f251"},
{file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:275d0ccdab9c3571bdb3e9acfab4497930aa584ff2766b035bb2f854deaf8b82"},
{file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:293da77bfcac3168fb35b27c242f97c1a05502435c0686ecbb8e2e4abcb3de26"},
{file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d6c2e5830705e4eeef33070ca4d5a24dfa221f28f2f540e5e6842c26e70b10b"},
{file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:11934bd78d97dd7e1a23a6222b5edd1e1b4d34e1ead5c846dc2b5c56fdc35ff5"},
{file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b802b6f0fbdcc3ab81b87f09b694dde91ab049f44d1d2c08c3dc8ea9a5950cfa"},
{file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7af871c5315eb829ecf4533c790461ea8f73b3bfd5f533b0467e479fdf6ddcfd"},
{file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d577dd4867b9e26cf60590e1f500990c8701a6e3cfbb9e644f4d0c0fb607028"},
{file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ed3dea2d1eca85fef5b8564ddd76dedb15a610c77d55d555b49d9f7c896b64b"},
{file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:91ec96f2c48e5bdeac9eea43a9bc9cc19acb2d2c59df0a13d5520dfc32457605"},
{file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7667ab423452754f36ba8fb41e006a46baace9c94e2aca2a745689b9f2753dfb"},
{file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:653583b1f3b088d106f180d6f02c90917ecd669ec956b62903a05df4a7f44863"},
{file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef3dd0cbdf2f0171caab90389af0ede068ec802bf46c6a77f14e6edc86671bc"},
{file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11b1833ee8ff8d5df39a34a895e060b57bd81e05ea68822bc60476daff4ce1c8"},
{file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8a3195639e6393b9d4aafe736036881ff86b6be5855d4bf7d9f5c31637181ec3"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a827fdd96604b3f2d4dd597fbea01f41e3df13841b995b871ee2d590df651fe"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7613f38e8d1fbd98baef94e68af603de3eec4f45d6aa6874d86cc04255bde08a"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcacaee9d4cb6525e2d9865bdc103d9e243b5eca77a794203c5822b201cddc63"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1b9619c0d4b1c5e02fbd7b8e95c29979fd2b56a919f1259213452e2cc93a3fc"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41748b395cc11d1bd645a998c08f9e504d7c4a80fb1081f2db57d984b7808ba0"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5ea2e520fa9abec2f2359244249516865c399329c79f0a7b78ab965fcfd7a9fb"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:48b79d1f3dec02d7cc36bdac168d316ca8d634a1f88168e471972bd241527162"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:64bc352ec1505b35574c6fdf3f245be5231e98c0a40a4385fd7df9e7d17c881c"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-win32.whl", hash = "sha256:19ebc7773c428d0c34228ab483aecfd9c97bb1eae679f04843822354a25fe76e"},
{file = "clickhouse_connect-0.8.18-cp310-cp310-win_amd64.whl", hash = "sha256:34fc8e0a2be6e61da6463b812154cf55b9b43e20841bc0c88d8067ed19daa6ce"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eebfa66b35801a35c8a96c53c5a42ea647791c2e90c6f0960b7baad6e7c009eb"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cff5150dfde8e69d7a9850d7fede971b35ec61c2b90ee1d7023f63f75f262ea7"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fe4871a17f4d144a754c48b7a5ff77066fb8a07b72101ec5f92a54590670f6f"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08b8d039b944a364264121342a6678fcc49ef2e1f56359d4cd8b03e31e221b40"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ea9073fc6d91db6c6bd87322a0cc00dc0aae5e6479371945f2aa9370c3412da"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b70530f6c16fca4faa1fd354e8cf42f4fae363e1f48da2d1b3713221327251a5"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ea0e7716a9a635cbd54ad9f59f55c3c08dfb4cbf2f7b796208bbe49a8dd25fe7"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128b0be2362e2011ba5913fe9def98c639990fb88cdbee2b68b2675193acdac0"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-win32.whl", hash = "sha256:9288c86bf3b4ffc002e0f21eb81e49b5504e39472d35fd79075619bb1c5e0b08"},
{file = "clickhouse_connect-0.8.18-cp311-cp311-win_amd64.whl", hash = "sha256:0089f2430647d0f4929445547646734c2190a505bf4918054a9626d1f22827e1"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:21570aa28c0a9753a8172f88fbe492dcc903f5162798725a04920e319b4771bb"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9494b4d2c74f94ad05ca17ebe91960396af7f45922ba908931eace77b53acca"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d438a6d6461f5bdb37344b668049a78e1a0f3353987a1e649001d075196fd688"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:777dfbea92984e1c834a75499f459823edb9e7afde9ed62281455edb5d7be577"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2eb88acedc3802fb39b2b3eb48b76ec7c7cc895c189b3ac031f880ee12e82bd9"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:83ae184507c671c3d833e688bd1445bc0fd62a4cf6db6cbaf9f8aaebd4134921"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a444ea5e14135c88349223af00d124e09f3f77384180ca7df96b4aafd8b6e9ee"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f0594ee5a261f2b89823f049edb7aebfa4cf3262ab324a76e462de0fd404320"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-win32.whl", hash = "sha256:6e087bc4162d156fc040678454e5eb6160f72d470e3817906128069a7881af7d"},
{file = "clickhouse_connect-0.8.18-cp312-cp312-win_amd64.whl", hash = "sha256:7dfd1280d62f24ff8f991953487958d4b97dd2435fea9d680f7f193049ed7e81"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fbb56d86b6e26016e24a260a2280c9831736797cf8eda8594551e25fa91a1b0e"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8e9b47475f82ad31c4981cb5284e6a7d9869dda7c295d69ba650011ec6b6c64e"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da68a0cece7a350d6ebeacc5934406c78c381d6bf6a598765d56f24b65b1ec85"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0036960dcb6bf88b262e9b461ddafb7597c0beb36700aab1e74dde64f02c8cfd"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcd291e920223edb3152bc4e7720b2b514b7a48fb18bb0c3346f66b2691add67"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba1747a569b87f693d2c470faaaf83e02a24792c3533502f92b11ada672c2a6f"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:63c9c1ab6899ff916f4751b1d7604b6e81c3a63f169ec538429be072905cf7c3"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:38bdf84bbbb784ce1ffb813edc1ddc7622cea3cc6ca59f336f82c3365fed36ef"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-win32.whl", hash = "sha256:252549ed7596baaf955699f7713ff171cb21292ea333ff01cc295d7bbf20a4d9"},
{file = "clickhouse_connect-0.8.18-cp313-cp313-win_amd64.whl", hash = "sha256:a7915cdd844d083905b5fe4f9139c65aa033652a670986381dc2e2b885108266"},
{file = "clickhouse_connect-0.8.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e62a847fc7827ee863af62e8c93a4bf0e325902b2b1863f0b5b016ad1a76bba3"},
{file = "clickhouse_connect-0.8.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1aa99e6192e8414b9fc579d5ba64e83c82953400e3b6d67604a441c2ff67c7e4"},
{file = "clickhouse_connect-0.8.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c98fdb56d4e0c54602319f4bd3ae3010b4ac81eab570591d502fbbd70557baba"},
{file = "clickhouse_connect-0.8.18-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:531a6479c42ebb1a497e18f64308a6ea6c894843f0c7021f95b3b33e1738a2ff"},
{file = "clickhouse_connect-0.8.18-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:8a0b3591ca43dc15115f8a8bce6db70c70fa99caa3f61849189ceeec7895545b"},
{file = "clickhouse_connect-0.8.18-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6f406161608243657027118efb12efca00889cc20c3e1d16b1c0f8b98079ccf5"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fab5ae60994a4702c5e333baea2e26c1d0abf4c2ccc0a7cb03a9a15197f04ab8"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:80c58ae5e978a3cdd840ffc1f4179c2b08cf440b72e272aecb1b08ddc6d82644"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9481e01cb303fc7bf6bef79382c4eca2e0ffd0ef01c056f7968aa7635b21c67"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11b220d5b060fad1cfeb5f58c39db0212d995b7310ec3c97241f77ebb208312a"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fefcbb5405f4ec803a016718958e4d8e5a3e548b9d3cc5a8ca6b2b327328ccaf"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7dc23183af888e9dc85b46c2a543504ad23add2c6f87b9ee5a5353ebd10e5215"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ac04864651f42f7bb3159c93c6f135854982d829d5abdbea7dd866a39a8dddeb"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f754a26e56f60941ab5afff6eeaaeaa8de10354a6b80ca759b4366368df185b0"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-win32.whl", hash = "sha256:35418b601c8f05ce4dafc56ad43d7ebb6c1ab8153cc420d5caf87e1fce3926ac"},
{file = "clickhouse_connect-0.8.18-cp39-cp39-win_amd64.whl", hash = "sha256:4c8167e61b7596825599d377e65026419ec3483700d2a021eb7a7b4fa9bb029e"},
{file = "clickhouse_connect-0.8.18-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4067fd9a47481c4fd612813398c452d38473bae886b38f5baf9d8ee576797a"},
{file = "clickhouse_connect-0.8.18-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:845342e60d43d1b69dd3c90c156135b3e1687bef5552cb3f3dfc3031731077b2"},
{file = "clickhouse_connect-0.8.18-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64424d5bf8ee9f8ec260ba8c6ae810f11bd9dbf049bdee6a28a72832ce8d347c"},
{file = "clickhouse_connect-0.8.18-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e85ced109bf1f2cf5afe5eb7b658c48be62964c1981a8eb4ae2f872827809e63"},
{file = "clickhouse_connect-0.8.18-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87e423a22f48249fb97654d0b90653e3cf2fe4b52c535e2448b568f5a30f3010"},
{file = "clickhouse_connect-0.8.18-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2e4585e7f0fc0a963a199f967924096ea4250473b080548adf816f3ce298c418"},
{file = "clickhouse_connect-0.8.18-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:186e8327166c26399290f529d94631615fb71885e6adc90fdb8128c7f5d4dc68"},
{file = "clickhouse_connect-0.8.18-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:14c0cb61cb53268ca90faebf0dd216ce8622978dca3c9e901a980b1f00a5d392"},
{file = "clickhouse_connect-0.8.18-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:bcbe8c6a104ebb7098db13bca7cbfb02b5af3e2c1af55708bfbb6ba7570d2804"},
{file = "clickhouse_connect-0.8.18-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:83b1fe08a655e4d26a64b3a3a5e06327cc2a02b5c7ccb8e3e0070b3880720641"},
{file = "clickhouse_connect-0.8.18-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:eae5f9f6d8617f52b4b44b8f6e53ac77785fc382afe788d6ae2dc9e87a403d1a"},
{file = "clickhouse_connect-0.8.18-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff90c27ea43c11cb8878139068fed719f0141cd19ff344b53044ddc08765b3bc"},
{file = "clickhouse_connect-0.8.18-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40a02bafa09ead516330874e1f1fa6b104f2eed209b37b6e7fbf3b83465c98da"},
{file = "clickhouse_connect-0.8.18-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a05e91e04376b17c5d5aef7b70e0bd968e294384581a7060f9b469765fb539c"},
{file = "clickhouse_connect-0.8.18-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2e13cf6dd9f6a0107af3819477c336469e09df5c900433640c168f92829fd8ae"},
{file = "clickhouse_connect-0.8.18.tar.gz", hash = "sha256:206a33decf2d9ed689d3156ef906dc06f1db7eabfe512e3552e08e9e86b4c73a"},
]
[package.dependencies]
certifi = "*"
lz4 = "*"
pytz = "*"
tzlocal = "*"
urllib3 = ">=1.26"
zstandard = "*"
[package.extras]
lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4 (<=3.0.1) ; implementation_name == \"pypy\"", "lz4 ; implementation_name != \"pypy\""]
numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"]
zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"]
arrow = ["pyarrow"]
numpy = ["numpy"]
orjson = ["orjson"]
pandas = ["pandas"]
sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
tzlocal = ["tzlocal (>=4.0)"]
[[package]]
name = "colorama"
@ -335,7 +390,7 @@ files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "sys_platform == \"win32\""}
markers = {main = "sys_platform == \"win32\"", dev = "sys_platform == \"win32\" or platform_system == \"Windows\""}
[[package]]
name = "dill"
@ -435,6 +490,62 @@ files = [
colors = ["colorama"]
plugins = ["setuptools"]
[[package]]
name = "lz4"
version = "4.4.4"
description = "LZ4 Bindings for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "lz4-4.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f170abb8416c4efca48e76cac2c86c3185efdf841aecbe5c190121c42828ced0"},
{file = "lz4-4.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d33a5105cd96ebd32c3e78d7ece6123a9d2fb7c18b84dec61f27837d9e0c496c"},
{file = "lz4-4.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ebbc5b76b4f0018988825a7e9ce153be4f0d4eba34e6c1f2fcded120573e88"},
{file = "lz4-4.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc64d6dfa7a89397529b22638939e70d85eaedc1bd68e30a29c78bfb65d4f715"},
{file = "lz4-4.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a355223a284f42a723c120ce68827de66d5cb872a38732b3d5abbf544fa2fe26"},
{file = "lz4-4.4.4-cp310-cp310-win32.whl", hash = "sha256:b28228197775b7b5096898851d59ef43ccaf151136f81d9c436bc9ba560bc2ba"},
{file = "lz4-4.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:45e7c954546de4f85d895aa735989d77f87dd649f503ce1c8a71a151b092ed36"},
{file = "lz4-4.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:e3fc90f766401684740978cd781d73b9685bd81b5dbf7257542ef9de4612e4d2"},
{file = "lz4-4.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ddfc7194cd206496c445e9e5b0c47f970ce982c725c87bd22de028884125b68f"},
{file = "lz4-4.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:714f9298c86f8e7278f1c6af23e509044782fa8220eb0260f8f8f1632f820550"},
{file = "lz4-4.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8474c91de47733856c6686df3c4aca33753741da7e757979369c2c0d32918ba"},
{file = "lz4-4.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80dd27d7d680ea02c261c226acf1d41de2fd77af4fb2da62b278a9376e380de0"},
{file = "lz4-4.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b7d6dddfd01b49aedb940fdcaf32f41dc58c926ba35f4e31866aeec2f32f4f4"},
{file = "lz4-4.4.4-cp311-cp311-win32.whl", hash = "sha256:4134b9fd70ac41954c080b772816bb1afe0c8354ee993015a83430031d686a4c"},
{file = "lz4-4.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:f5024d3ca2383470f7c4ef4d0ed8eabad0b22b23eeefde1c192cf1a38d5e9f78"},
{file = "lz4-4.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:6ea715bb3357ea1665f77874cf8f55385ff112553db06f3742d3cdcec08633f7"},
{file = "lz4-4.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:23ae267494fdd80f0d2a131beff890cf857f1b812ee72dbb96c3204aab725553"},
{file = "lz4-4.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fff9f3a1ed63d45cb6514bfb8293005dc4141341ce3500abdfeb76124c0b9b2e"},
{file = "lz4-4.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ea7f07329f85a8eda4d8cf937b87f27f0ac392c6400f18bea2c667c8b7f8ecc"},
{file = "lz4-4.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ccab8f7f7b82f9fa9fc3b0ba584d353bd5aa818d5821d77d5b9447faad2aaad"},
{file = "lz4-4.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43e9d48b2daf80e486213128b0763deed35bbb7a59b66d1681e205e1702d735"},
{file = "lz4-4.4.4-cp312-cp312-win32.whl", hash = "sha256:33e01e18e4561b0381b2c33d58e77ceee850a5067f0ece945064cbaac2176962"},
{file = "lz4-4.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d21d1a2892a2dcc193163dd13eaadabb2c1b803807a5117d8f8588b22eaf9f12"},
{file = "lz4-4.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:2f4f2965c98ab254feddf6b5072854a6935adab7bc81412ec4fe238f07b85f62"},
{file = "lz4-4.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed6eb9f8deaf25ee4f6fad9625d0955183fdc90c52b6f79a76b7f209af1b6e54"},
{file = "lz4-4.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:18ae4fe3bafb344dbd09f976d45cbf49c05c34416f2462828f9572c1fa6d5af7"},
{file = "lz4-4.4.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fd20c5fc1a49d1bbd170836fccf9a338847e73664f8e313dce6ac91b8c1e02"},
{file = "lz4-4.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9cb387c33f014dae4db8cb4ba789c8d2a0a6d045ddff6be13f6c8d9def1d2a6"},
{file = "lz4-4.4.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0be9f68240231e1e44118a4ebfecd8a5d4184f0bdf5c591c98dd6ade9720afd"},
{file = "lz4-4.4.4-cp313-cp313-win32.whl", hash = "sha256:e9ec5d45ea43684f87c316542af061ef5febc6a6b322928f059ce1fb289c298a"},
{file = "lz4-4.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:a760a175b46325b2bb33b1f2bbfb8aa21b48e1b9653e29c10b6834f9bb44ead4"},
{file = "lz4-4.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:f4c21648d81e0dda38b4720dccc9006ae33b0e9e7ffe88af6bf7d4ec124e2fba"},
{file = "lz4-4.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd1add57b6fe1f96bed2d529de085e9378a3ac04b86f116d10506f85b68e97fc"},
{file = "lz4-4.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:900912e8a7cf74b4a2bea18a3594ae0bf1138f99919c20017167b6e05f760aa4"},
{file = "lz4-4.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:017f8d269a739405a59d68a4d63d23a8df23e3bb2c70aa069b7563af08dfdffb"},
{file = "lz4-4.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac522788296a9a02a39f620970dea86c38e141e21e51238f1b5e9fa629f8e69"},
{file = "lz4-4.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b56aa9eef830bf6443acd8c4e18b208a8993dc32e0d6ef4263ecfa6afb3f599"},
{file = "lz4-4.4.4-cp39-cp39-win32.whl", hash = "sha256:585b42eb37ab16a278c3a917ec23b2beef175aa669f4120142b97aebf90ef775"},
{file = "lz4-4.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:4ab1537bd3b3bfbafd3c8847e06827129794488304f21945fc2f5b669649d94f"},
{file = "lz4-4.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:38730927ad51beb42ab8dbc5555270bfbe86167ba734265f88bbd799fced1004"},
{file = "lz4-4.4.4.tar.gz", hash = "sha256:070fd0627ec4393011251a094e08ed9fdcc78cb4e7ab28f507638eee4e39abda"},
]
[package.extras]
docs = ["sphinx (>=1.6.0)", "sphinx_bootstrap_theme"]
flake8 = ["flake8"]
tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"]
[[package]]
name = "mccabe"
version = "0.7.0"
@ -449,14 +560,98 @@ files = [
[[package]]
name = "mypy-extensions"
version = "1.0.0"
version = "1.1.0"
description = "Type system extensions for programs checked with the mypy type checker."
optional = false
python-versions = ">=3.5"
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"},
{file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
]
[[package]]
name = "numpy"
version = "2.3.2"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.11"
groups = ["main"]
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
{file = "numpy-2.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:852ae5bed3478b92f093e30f785c98e0cb62fa0a939ed057c31716e18a7a22b9"},
{file = "numpy-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a0e27186e781a69959d0230dd9909b5e26024f8da10683bd6344baea1885168"},
{file = "numpy-2.3.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f0a1a8476ad77a228e41619af2fa9505cf69df928e9aaa165746584ea17fed2b"},
{file = "numpy-2.3.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cbc95b3813920145032412f7e33d12080f11dc776262df1712e1638207dde9e8"},
{file = "numpy-2.3.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75018be4980a7324edc5930fe39aa391d5734531b1926968605416ff58c332d"},
{file = "numpy-2.3.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20b8200721840f5621b7bd03f8dcd78de33ec522fc40dc2641aa09537df010c3"},
{file = "numpy-2.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f91e5c028504660d606340a084db4b216567ded1056ea2b4be4f9d10b67197f"},
{file = "numpy-2.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fb1752a3bb9a3ad2d6b090b88a9a0ae1cd6f004ef95f75825e2f382c183b2097"},
{file = "numpy-2.3.2-cp311-cp311-win32.whl", hash = "sha256:4ae6863868aaee2f57503c7a5052b3a2807cf7a3914475e637a0ecd366ced220"},
{file = "numpy-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:240259d6564f1c65424bcd10f435145a7644a65a6811cfc3201c4a429ba79170"},
{file = "numpy-2.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:4209f874d45f921bde2cff1ffcd8a3695f545ad2ffbef6d3d3c6768162efab89"},
{file = "numpy-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bc3186bea41fae9d8e90c2b4fb5f0a1f5a690682da79b92574d63f56b529080b"},
{file = "numpy-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f4f0215edb189048a3c03bd5b19345bdfa7b45a7a6f72ae5945d2a28272727f"},
{file = "numpy-2.3.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b1224a734cd509f70816455c3cffe13a4f599b1bf7130f913ba0e2c0b2006c0"},
{file = "numpy-2.3.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3dcf02866b977a38ba3ec10215220609ab9667378a9e2150615673f3ffd6c73b"},
{file = "numpy-2.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:572d5512df5470f50ada8d1972c5f1082d9a0b7aa5944db8084077570cf98370"},
{file = "numpy-2.3.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8145dd6d10df13c559d1e4314df29695613575183fa2e2d11fac4c208c8a1f73"},
{file = "numpy-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:103ea7063fa624af04a791c39f97070bf93b96d7af7eb23530cd087dc8dbe9dc"},
{file = "numpy-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc927d7f289d14f5e037be917539620603294454130b6de200091e23d27dc9be"},
{file = "numpy-2.3.2-cp312-cp312-win32.whl", hash = "sha256:d95f59afe7f808c103be692175008bab926b59309ade3e6d25009e9a171f7036"},
{file = "numpy-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9e196ade2400c0c737d93465327d1ae7c06c7cb8a1756121ebf54b06ca183c7f"},
{file = "numpy-2.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:ee807923782faaf60d0d7331f5e86da7d5e3079e28b291973c545476c2b00d07"},
{file = "numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3"},
{file = "numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b"},
{file = "numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6"},
{file = "numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089"},
{file = "numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2"},
{file = "numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f"},
{file = "numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee"},
{file = "numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6"},
{file = "numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b"},
{file = "numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56"},
{file = "numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2"},
{file = "numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab"},
{file = "numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2"},
{file = "numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a"},
{file = "numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286"},
{file = "numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8"},
{file = "numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a"},
{file = "numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91"},
{file = "numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5"},
{file = "numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5"},
{file = "numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450"},
{file = "numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a"},
{file = "numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a"},
{file = "numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b"},
{file = "numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125"},
{file = "numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19"},
{file = "numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f"},
{file = "numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5"},
{file = "numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58"},
{file = "numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0"},
{file = "numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2"},
{file = "numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b"},
{file = "numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910"},
{file = "numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e"},
{file = "numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45"},
{file = "numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b"},
{file = "numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2"},
{file = "numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0"},
{file = "numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0"},
{file = "numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2"},
{file = "numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf"},
{file = "numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1"},
{file = "numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b"},
{file = "numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631"},
{file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:14a91ebac98813a49bc6aa1a0dfc09513dcec1d97eaf31ca21a87221a1cdcb15"},
{file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:71669b5daae692189540cffc4c439468d35a3f84f0c88b078ecd94337f6cb0ec"},
{file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:69779198d9caee6e547adb933941ed7520f896fd9656834c300bdf4dd8642712"},
{file = "numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2c3271cc4097beb5a60f010bcc1cc204b300bb3eafb4399376418a83a1c6373c"},
{file = "numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8446acd11fe3dc1830568c941d44449fd5cb83068e5c70bd5a470d323d448296"},
{file = "numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa098a5ab53fa407fded5870865c6275a5cd4101cfdef8d6fafc48286a96e981"},
{file = "numpy-2.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6936aff90dda378c09bea075af0d9c675fe3a977a9d2402f95a87f440f59f619"},
{file = "numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48"},
]
[[package]]
@ -465,7 +660,7 @@ version = "24.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main"]
groups = ["main", "dev"]
files = [
{file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
{file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
@ -477,7 +672,7 @@ version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
groups = ["main"]
groups = ["dev"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
@ -489,7 +684,7 @@ version = "4.3.7"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
groups = ["dev"]
files = [
{file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"},
{file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"},
@ -536,6 +731,19 @@ files = [
{file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"},
]
[[package]]
name = "pycparser"
version = "2.22"
description = "C parser in Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "platform_python_implementation == \"PyPy\""
files = [
{file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
]
[[package]]
name = "pyflakes"
version = "3.3.2"
@ -594,6 +802,17 @@ pluggy = ">=1.5,<2"
[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "python-baseconv"
version = "1.2.2"
description = "Convert numbers from base 10 integers to base X strings and back again."
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "python-baseconv-1.2.2.tar.gz", hash = "sha256:0539f8bd0464013b05ad62e0a1673f0ac9086c76b43ebf9f833053527cd9931b"},
]
[[package]]
name = "python-dotenv"
version = "1.1.0"
@ -670,6 +889,20 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "svix-ksuid"
version = "0.6.2"
description = "A pure-Python KSUID implementation"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "svix-ksuid-0.6.2.tar.gz", hash = "sha256:beb95bd6284bdbd526834e233846653d2bd26eb162b3233513d8f2c853c78964"},
]
[package.dependencies]
python-baseconv = "*"
[[package]]
name = "testcontainers"
version = "4.10.0"
@ -748,37 +981,6 @@ files = [
{file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"},
]
[[package]]
name = "tzdata"
version = "2025.2"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
groups = ["main"]
markers = "platform_system == \"Windows\""
files = [
{file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"},
{file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"},
]
[[package]]
name = "tzlocal"
version = "5.3.1"
description = "tzinfo object for the local timezone"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d"},
{file = "tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd"},
]
[package.dependencies]
tzdata = {version = "*", markers = "platform_system == \"Windows\""}
[package.extras]
devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"]
[[package]]
name = "urllib3"
version = "2.4.0"
@ -905,7 +1107,120 @@ files = [
{file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"},
]
[[package]]
name = "zstandard"
version = "0.23.0"
description = "Zstandard bindings for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"},
{file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"},
{file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"},
{file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"},
{file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"},
{file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"},
{file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"},
{file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"},
{file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"},
{file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"},
{file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"},
{file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"},
{file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"},
{file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"},
{file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"},
{file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"},
{file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"},
{file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"},
{file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"},
{file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"},
{file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"},
{file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"},
{file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"},
{file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"},
{file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"},
{file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"},
{file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"},
{file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"},
{file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"},
{file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"},
{file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"},
{file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"},
{file = "zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"},
{file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"},
{file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"},
{file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"},
{file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"},
{file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"},
{file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"},
{file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"},
{file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"},
{file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"},
{file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"},
{file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"},
{file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"},
{file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"},
{file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"},
{file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"},
{file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"},
{file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"},
{file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"},
{file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"},
{file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"},
{file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"},
{file = "zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"},
{file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"},
{file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"},
{file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"},
{file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"},
{file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"},
{file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"},
{file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"},
{file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"},
{file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"},
{file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"},
{file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"},
{file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"},
{file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"},
{file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"},
{file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"},
{file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"},
{file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"},
{file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"},
{file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"},
{file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"},
{file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"},
{file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"},
{file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"},
{file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"},
{file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"},
{file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"},
{file = "zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"},
{file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"},
{file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"},
{file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"},
{file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"},
{file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"},
{file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"},
{file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"},
{file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"},
{file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"},
{file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"},
{file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"},
{file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"},
{file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"},
{file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"},
{file = "zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"},
]
[package.dependencies]
cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""}
[package.extras]
cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.1"
python-versions = "^3.13"
content-hash = "678b22f11117e1b73abfc5359fc144a7c0eeb8c67ed7fb8ef1a66d6587b47232"
content-hash = "e55e2d0af184876ca7c3a42d700c085abd3516430120c04cb984f254a052ac00"

View File

@ -10,16 +10,18 @@ python = "^3.13"
pytest = "^8.3.5"
psycopg2 = "^2.9.10"
testcontainers = "^4.10.0"
black = "^25.1.0"
clickhouse-driver = "^0.2.9"
requests = "^2.32.3"
wiremock = "^2.6.1"
numpy = "^2.3.2"
clickhouse-connect = "^0.8.18"
svix-ksuid = "^0.6.2"
[tool.poetry.group.dev.dependencies]
pylint = "^3.3.6"
isort = "^6.0.1"
autoflake = "^2.3.1"
black = "^25.1.0"
[build-system]
requires = ["poetry-core"]
@ -36,10 +38,10 @@ addopts = "-ra"
ignore = [".venv"]
[tool.pylint.format]
max-line-length = "88"
max-line-length = "400"
[tool.pylint."messages control"]
disable = ["missing-module-docstring", "missing-function-docstring", "missing-class-docstring"]
disable = ["missing-module-docstring", "missing-function-docstring", "missing-class-docstring", "duplicate-code", "dangerous-default-value", "too-many-positional-arguments", "too-many-arguments", "too-few-public-methods", "too-many-instance-attributes", "too-many-locals", "too-many-statements"]
[tool.isort]
profile = "black"

View File

View File

@ -9,13 +9,15 @@ logger = setup_logger(__name__)
def test_register(signoz: types.SigNoz, get_jwt_token) -> None:
response = requests.get(signoz.self.host_config.get("/api/v1/version"), timeout=2)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/version"), timeout=2
)
assert response.status_code == HTTPStatus.OK
assert response.json()["setupCompleted"] is False
response = requests.post(
signoz.self.host_config.get("/api/v1/register"),
signoz.self.host_configs["8080"].get("/api/v1/register"),
json={
"name": "admin",
"orgId": "",
@ -27,7 +29,9 @@ def test_register(signoz: types.SigNoz, get_jwt_token) -> None:
)
assert response.status_code == HTTPStatus.OK
response = requests.get(signoz.self.host_config.get("/api/v1/version"), timeout=2)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/version"), timeout=2
)
assert response.status_code == HTTPStatus.OK
assert response.json()["setupCompleted"] is True
@ -35,7 +39,7 @@ def test_register(signoz: types.SigNoz, get_jwt_token) -> None:
admin_token = get_jwt_token("admin@integration.test", "password")
response = requests.get(
signoz.self.host_config.get("/api/v1/user"),
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
@ -52,7 +56,7 @@ def test_register(signoz: types.SigNoz, get_jwt_token) -> None:
assert found_user["role"] == "ADMIN"
response = requests.get(
signoz.self.host_config.get(f"/api/v1/user/{found_user["id"]}"),
signoz.self.host_configs["8080"].get(f"/api/v1/user/{found_user["id"]}"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
@ -64,33 +68,37 @@ def test_register(signoz: types.SigNoz, get_jwt_token) -> None:
def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None:
# Generate an invite token for the editor user
response = requests.post(
signoz.self.host_config.get("/api/v1/invite"),
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": "editor@integration.test", "role": "EDITOR"},
timeout=2,
headers={
"Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password")}" # pylint: disable=line-too-long
"Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password")}"
},
)
assert response.status_code == HTTPStatus.CREATED
response = requests.get(
signoz.self.host_config.get("/api/v1/invite"),
signoz.self.host_configs["8080"].get("/api/v1/invite"),
timeout=2,
headers={
"Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password")}" # pylint: disable=line-too-long
"Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password")}"
},
)
invite_response = response.json()["data"]
found_invite = next(
(invite for invite in invite_response if invite["email"] == "editor@integration.test"),
(
invite
for invite in invite_response
if invite["email"] == "editor@integration.test"
),
None,
)
# Register the editor user using the invite token
response = requests.post(
signoz.self.host_config.get("/api/v1/invite/accept"),
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={
"password": "password",
"displayName": "editor",
@ -102,9 +110,7 @@ def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None:
# Verify that the invite token has been deleted
response = requests.get(
signoz.self.host_config.get(
f"/api/v1/invite/{found_invite['token']}"
), # pylint: disable=line-too-long
signoz.self.host_configs["8080"].get(f"/api/v1/invite/{found_invite['token']}"),
timeout=2,
)
@ -112,10 +118,10 @@ def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None:
# Verify that an admin endpoint cannot be called by the editor user
response = requests.get(
signoz.self.host_config.get("/api/v1/user"),
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={
"Authorization": f"Bearer {get_jwt_token("editor@integration.test", "password")}" # pylint: disable=line-too-long
"Authorization": f"Bearer {get_jwt_token("editor@integration.test", "password")}"
},
)
@ -123,10 +129,10 @@ def test_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None:
# Verify that the editor has been created
response = requests.get(
signoz.self.host_config.get("/api/v1/user"),
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={
"Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password")}" # pylint: disable=line-too-long
"Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password")}"
},
)
@ -148,32 +154,34 @@ def test_revoke_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None
admin_token = get_jwt_token("admin@integration.test", "password")
# Generate an invite token for the viewer user
response = requests.post(
signoz.self.host_config.get("/api/v1/invite"),
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": "viewer@integration.test", "role": "VIEWER"},
timeout=2,
headers={
"Authorization": f"Bearer {admin_token}" # pylint: disable=line-too-long
},
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.CREATED
response = requests.get(
signoz.self.host_config.get("/api/v1/invite"),
signoz.self.host_configs["8080"].get("/api/v1/invite"),
timeout=2,
headers={
"Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password")}" # pylint: disable=line-too-long
"Authorization": f"Bearer {get_jwt_token("admin@integration.test", "password")}"
},
)
invite_response = response.json()["data"]
found_invite = next(
(invite for invite in invite_response if invite["email"] == "viewer@integration.test"),
(
invite
for invite in invite_response
if invite["email"] == "viewer@integration.test"
),
None,
)
response = requests.delete(
signoz.self.host_config.get(f"/api/v1/invite/{found_invite['id']}"),
signoz.self.host_configs["8080"].get(f"/api/v1/invite/{found_invite['id']}"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
@ -182,7 +190,7 @@ def test_revoke_invite_and_register(signoz: types.SigNoz, get_jwt_token) -> None
# Try registering the viewer user with the invite token
response = requests.post(
signoz.self.host_config.get("/api/v1/invite/accept"),
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={
"password": "password",
"displayName": "viewer",
@ -198,7 +206,7 @@ def test_self_access(signoz: types.SigNoz, get_jwt_token) -> None:
admin_token = get_jwt_token("admin@integration.test", "password")
response = requests.get(
signoz.self.host_config.get("/api/v1/user"),
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
@ -212,7 +220,7 @@ def test_self_access(signoz: types.SigNoz, get_jwt_token) -> None:
)
response = requests.get(
signoz.self.host_config.get(f"/api/v1/user/{found_user['id']}"),
signoz.self.host_configs["8080"].get(f"/api/v1/user/{found_user['id']}"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
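Every request in these tests now goes through signoz.self.host_configs["8080"] instead of signoz.self.host_config, i.e. URLs are looked up per container port. A rough sketch of the assumed shape of that fixture type (names are illustrative, not copied from the fixtures package):

from dataclasses import dataclass
from typing import Dict

@dataclass
class HostConfig:
    scheme: str
    address: str
    port: int

    def get(self, path: str) -> str:
        # Build a full URL for the mapped host port, e.g. http://localhost:54321/api/v1/version
        return f"{self.scheme}://{self.address}:{self.port}{path}"

@dataclass
class ContainerInfo:
    # Keyed by the container-side port ("8080" for the HTTP API), so one container
    # can expose several endpoints to the tests.
    host_configs: Dict[str, HostConfig]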

View File

@ -55,7 +55,7 @@ def test_apply_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None:
access_token = get_jwt_token("admin@integration.test", "password")
response = requests.post(
url=signoz.self.host_config.get("/api/v3/licenses"),
url=signoz.self.host_configs["8080"].get("/api/v3/licenses"),
json={"key": "secret-key"},
headers={"Authorization": "Bearer " + access_token},
timeout=5,
@ -64,7 +64,7 @@ def test_apply_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None:
assert response.status_code == http.HTTPStatus.ACCEPTED
response = requests.post(
url=signoz.zeus.host_config.get("/__admin/requests/count"),
url=signoz.zeus.host_configs["8080"].get("/__admin/requests/count"),
json={"method": "GET", "url": "/v2/licenses/me"},
timeout=5,
)
@ -114,7 +114,7 @@ def test_refresh_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None
access_token = get_jwt_token("admin@integration.test", "password")
response = requests.put(
url=signoz.self.host_config.get("/api/v3/licenses"),
url=signoz.self.host_configs["8080"].get("/api/v3/licenses"),
headers={"Authorization": "Bearer " + access_token},
timeout=5,
)
@ -129,7 +129,7 @@ def test_refresh_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None
assert json.loads(record)["valid_from"] == 1732146922
response = requests.post(
url=signoz.zeus.host_config.get("/__admin/requests/count"),
url=signoz.zeus.host_configs["8080"].get("/__admin/requests/count"),
json={"method": "GET", "url": "/v2/licenses/me"},
timeout=5,
)
@ -166,7 +166,7 @@ def test_license_checkout(signoz: SigNoz, make_http_mocks, get_jwt_token) -> Non
access_token = get_jwt_token("admin@integration.test", "password")
response = requests.post(
url=signoz.self.host_config.get("/api/v1/checkout"),
url=signoz.self.host_configs["8080"].get("/api/v1/checkout"),
json={"url": "https://integration-signoz.com"},
headers={"Authorization": "Bearer " + access_token},
timeout=5,
@ -176,7 +176,7 @@ def test_license_checkout(signoz: SigNoz, make_http_mocks, get_jwt_token) -> Non
assert response.json()["data"]["redirectURL"] == "https://signoz.checkout.com"
response = requests.post(
url=signoz.zeus.host_config.get("/__admin/requests/count"),
url=signoz.zeus.host_configs["8080"].get("/__admin/requests/count"),
json={"method": "POST", "url": "/v2/subscriptions/me/sessions/checkout"},
timeout=5,
)
@ -213,7 +213,7 @@ def test_license_portal(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None:
access_token = get_jwt_token("admin@integration.test", "password")
response = requests.post(
url=signoz.self.host_config.get("/api/v1/portal"),
url=signoz.self.host_configs["8080"].get("/api/v1/portal"),
json={"url": "https://integration-signoz.com"},
headers={"Authorization": "Bearer " + access_token},
timeout=5,
@ -223,7 +223,7 @@ def test_license_portal(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None:
assert response.json()["data"]["redirectURL"] == "https://signoz.portal.com"
response = requests.post(
url=signoz.zeus.host_config.get("/__admin/requests/count"),
url=signoz.zeus.host_configs["8080"].get("/__admin/requests/count"),
json={"method": "POST", "url": "/v2/subscriptions/me/sessions/portal"},
timeout=5,
)

View File

@ -9,13 +9,14 @@ def test_api_key(signoz: types.SigNoz, get_jwt_token) -> None:
admin_token = get_jwt_token("admin@integration.test", "password")
response = requests.post(
signoz.self.host_config.get("/api/v1/pats"),
signoz.self.host_configs["8080"].get("/api/v1/pats"),
headers={"Authorization": f"Bearer {admin_token}"},
json={
"name": "admin",
"role": "ADMIN",
"expiresInDays": 1,
},
timeout=2,
)
assert response.status_code == HTTPStatus.CREATED
@ -24,7 +25,7 @@ def test_api_key(signoz: types.SigNoz, get_jwt_token) -> None:
assert "token" in pat_response["data"]
response = requests.get(
signoz.self.host_config.get("/api/v1/user"),
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"SIGNOZ-API-KEY": f"{pat_response["data"]["token"]}"},
)
@ -33,13 +34,18 @@ def test_api_key(signoz: types.SigNoz, get_jwt_token) -> None:
user_response = response.json()
found_user = next(
(user for user in user_response["data"] if user["email"] == "admin@integration.test"),
(
user
for user in user_response["data"]
if user["email"] == "admin@integration.test"
),
None,
)
response = requests.get(
signoz.self.host_config.get("/api/v1/pats"),
signoz.self.host_configs["8080"].get("/api/v1/pats"),
headers={"SIGNOZ-API-KEY": f"{pat_response["data"]["token"]}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK

View File

@ -1,17 +0,0 @@
from clickhouse_driver.dbapi.cursor import Cursor
from fixtures import types
def test_telemetry_databases(signoz: types.SigNoz) -> None:
cursor = signoz.telemetrystore.conn.cursor()
assert isinstance(cursor, Cursor)
cursor.execute("SHOW DATABASES")
records = cursor.fetchall()
assert any("signoz_metrics" in record for record in records)
assert any("signoz_logs" in record for record in records)
assert any("signoz_traces" in record for record in records)
assert any("signoz_metadata" in record for record in records)
assert any("signoz_analytics" in record for record in records)

View File

@ -0,0 +1,35 @@
from http import HTTPStatus
import numpy as np
import requests
from fixtures import types
def test_setup(signoz: types.SigNoz) -> None:
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/version"), timeout=2
)
assert response.status_code == HTTPStatus.OK
def test_telemetry_databases_exist(signoz: types.SigNoz) -> None:
arr: np.ndarray = signoz.telemetrystore.conn.query_np("SHOW DATABASES")
databases = arr.tolist() if arr.size > 0 else []
required_databases = [
"signoz_metrics",
"signoz_logs",
"signoz_traces",
"signoz_metadata",
"signoz_analytics",
]
for db_name in required_databases:
assert any(
db_name in str(db) for db in databases
), f"Database {db_name} not found"
def test_teardown(signoz: types.SigNoz) -> None: # pylint: disable=unused-argument
pass
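The rewritten database check above relies on clickhouse-connect's query_np, which returns a numpy array rather than the DB-API cursor used by the removed clickhouse-driver test. A self-contained sketch of that call, with connection parameters assumed:

import clickhouse_connect
import numpy as np

# Host, port and credentials are assumptions for illustration.
client = clickhouse_connect.get_client(
    host="localhost", port=8123, username="default", password=""
)

arr: np.ndarray = client.query_np("SHOW DATABASES")  # one row per database name
databases = arr.tolist() if arr.size > 0 else []
assert any("signoz_logs" in str(db) for db in databases)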

View File

@ -0,0 +1,734 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
import requests
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
def test_logs_list(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_jwt_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert 2 logs with different attributes
Tests:
1. Query logs for the last 10 seconds and check if the logs are returned in the correct order
2. Query values of severity_text attribute from the autocomplete API
3. Query values of severity_text attribute from the fields API
4. Query values of code.file attribute from the autocomplete API
5. Query values of code.file attribute from the fields API
6. Query values of code.line attribute from the autocomplete API
7. Query values of code.line attribute from the fields API
"""
insert_logs(
[
Logs(
timestamp=datetime.now(tz=timezone.utc) - timedelta(seconds=1),
resources={
"deployment.environment": "production",
"service.name": "java",
"os.type": "linux",
"host.name": "linux-001",
"cloud.provider": "integration",
"cloud.account.id": "001",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/Integration.java",
"code.function": "com.example.Integration.process",
"code.line": 120,
"telemetry.sdk.language": "java",
},
body="This is a log message, coming from a java application",
severity_text="DEBUG",
),
Logs(
timestamp=datetime.now(tz=timezone.utc),
resources={
"deployment.environment": "production",
"service.name": "go",
"os.type": "linux",
"host.name": "linux-001",
"cloud.provider": "integration",
"cloud.account.id": "001",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/integration.go",
"code.function": "com.example.Integration.process",
"code.line": 120,
"telemetry.sdk.language": "go",
},
body="This is a log message, coming from a go application",
severity_text="INFO",
),
]
)
token = get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)
# Query Logs for the last 10 seconds and check if the logs are returned in the correct order
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(datetime.now(tz=timezone.utc) - timedelta(seconds=10)).timestamp()
* 1000
),
"end": int(datetime.now(tz=timezone.utc).timestamp() * 1000),
"requestType": "raw",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"disabled": False,
"limit": 100,
"offset": 0,
"order": [
{"key": {"name": "timestamp"}, "direction": "desc"},
{"key": {"name": "id"}, "direction": "desc"},
],
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
rows = results[0]["rows"]
assert len(rows) == 2
assert (
rows[0]["data"]["body"] == "This is a log message, coming from a go application"
)
assert rows[0]["data"]["resources_string"] == {
"cloud.account.id": "001",
"cloud.provider": "integration",
"deployment.environment": "production",
"host.name": "linux-001",
"os.type": "linux",
"service.name": "go",
}
assert rows[0]["data"]["attributes_string"] == {
"code.file": "/opt/integration.go",
"code.function": "com.example.Integration.process",
"log.iostream": "stdout",
"logtag": "F",
"telemetry.sdk.language": "go",
}
assert rows[0]["data"]["attributes_number"] == {"code.line": 120}
assert (
rows[1]["data"]["body"]
== "This is a log message, coming from a java application"
)
assert rows[1]["data"]["resources_string"] == {
"cloud.account.id": "001",
"cloud.provider": "integration",
"deployment.environment": "production",
"host.name": "linux-001",
"os.type": "linux",
"service.name": "java",
}
assert rows[1]["data"]["attributes_string"] == {
"code.file": "/opt/Integration.java",
"code.function": "com.example.Integration.process",
"log.iostream": "stdout",
"logtag": "F",
"telemetry.sdk.language": "java",
}
assert rows[1]["data"]["attributes_number"] == {"code.line": 120}
# Query values of severity_text attribute from the autocomplete API
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v3/autocomplete/attribute_values"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"aggregateOperator": "noop",
"dataSource": "logs",
"aggregateAttribute": "",
"attributeKey": "severity_text",
"searchText": "",
"filterAttributeKeyDataType": "string",
"tagType": "resource",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = response.json()["data"]["stringAttributeValues"]
assert len(values) == 2
assert "DEBUG" in values
assert "INFO" in values
# Query values of severity_text attribute from the fields API
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/fields/values"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"signal": "logs",
"name": "severity_text",
"searchText": "",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = response.json()["data"]["values"]["stringValues"]
assert len(values) == 2
assert "DEBUG" in values
assert "INFO" in values
# Query values of code.file attribute from the autocomplete API
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v3/autocomplete/attribute_values"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"aggregateOperator": "noop",
"dataSource": "logs",
"aggregateAttribute": "",
"attributeKey": "code.file",
"searchText": "",
"filterAttributeKeyDataType": "string",
"tagType": "tag",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = response.json()["data"]["stringAttributeValues"]
assert len(values) == 2
assert "/opt/Integration.java" in values
assert "/opt/integration.go" in values
# Query values of code.file attribute from the fields API
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/fields/values"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"signal": "logs",
"name": "code.file",
"searchText": "",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = response.json()["data"]["values"]["stringValues"]
assert len(values) == 2
assert "/opt/Integration.java" in values
assert "/opt/integration.go" in values
# Query values of code.line attribute from the autocomplete API
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v3/autocomplete/attribute_values"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"aggregateOperator": "noop",
"dataSource": "logs",
"aggregateAttribute": "",
"attributeKey": "code.line",
"searchText": "",
"filterAttributeKeyDataType": "float64",
"tagType": "tag",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = response.json()["data"]["numberAttributeValues"]
assert len(values) == 1
assert 120 in values
# Query values of code.line attribute from the fields API
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/fields/values"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
params={
"signal": "logs",
"name": "code.line",
"searchText": "",
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = response.json()["data"]["values"]["numberValues"]
assert len(values) == 1
assert 120 in values
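All /api/v5/query_range calls in these tests share the same envelope; only requestType and the builder-query spec change. Purely as an illustration (this helper is not part of the commit), the repeated request could be factored roughly like this:

from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List

import requests

def query_range(
    base_url: str, token: str, request_type: str, spec: Dict[str, Any], minutes: int = 5
) -> List[Dict[str, Any]]:
    end = datetime.now(tz=timezone.utc)
    start = end - timedelta(minutes=minutes)
    response = requests.post(
        f"{base_url}/api/v5/query_range",
        timeout=2,
        headers={"authorization": f"Bearer {token}"},
        json={
            "schemaVersion": "v1",
            "start": int(start.timestamp() * 1000),
            "end": int(end.timestamp() * 1000),
            "requestType": request_type,
            "compositeQuery": {"queries": [{"type": "builder_query", "spec": spec}]},
            "formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
        },
    )
    response.raise_for_status()
    return response.json()["data"]["data"]["results"]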
def test_logs_time_series_count(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_jwt_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert 17 logs with service.name "java" and severity_text "DEBUG",
23 logs with service.name "erlang" and severity_text "ERROR",
and 29 logs with service.name "go" and severity_text "WARNING".
All logs carry an incrementing code.line attribute; host.name and cloud.account.id alternate between two values (i % 2).
Tests:
1. count() of all logs for the last 5 minutes
2. count() of all logs where code.line = 7 for the last 5 minutes
3. count() of all logs grouped by host.name for the last 5 minutes
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = []
for i in range(17):
logs.append(
Logs(
timestamp=now
- timedelta(
microseconds=i + 1
), # These logs will be grouped in the now - 1 minute bucket
resources={
"deployment.environment": "production",
"service.name": "java",
"os.type": "linux",
"host.name": f"linux-00{i%2}",
"cloud.provider": "integration",
"cloud.account.id": f"00{i%2}",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/Integration.java",
"code.function": "com.example.Integration.process",
"code.line": i + 1,
"telemetry.sdk.language": "java",
},
body=f"This is a log message, number {i+1} coming from a java application",
severity_text="DEBUG",
)
)
for i in range(23):
logs.append(
Logs(
timestamp=now
- timedelta(minutes=1)
- timedelta(
microseconds=i + 1
), # These logs will be grouped in the now - 2 minute bucket
resources={
"deployment.environment": "production",
"service.name": "erlang",
"os.type": "linux",
"host.name": f"linux-00{i%2}",
"cloud.provider": "integration",
"cloud.account.id": f"00{i%2}",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/Integration.erlang",
"code.function": "com.example.Integration.process",
"code.line": i + 1,
"telemetry.sdk.language": "erlang",
},
body=f"This is a log message, number {i+1} coming from a erlang application",
severity_text="ERROR",
)
)
for i in range(29):
logs.append(
Logs(
timestamp=now
- timedelta(minutes=2)
- timedelta(
microseconds=i + 1
), # These logs will be grouped in the now - 3 minute bucket
resources={
"deployment.environment": "production",
"service.name": "go",
"os.type": "linux",
"host.name": f"linux-00{i%2}",
"cloud.provider": "integration",
"cloud.account.id": f"00{i%2}",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/Integration.go",
"code.function": "com.example.Integration.process",
"code.line": i + 1,
"telemetry.sdk.language": "go",
},
body=f"This is a log message, number {i+1} coming from a go application",
severity_text="WARNING",
)
)
insert_logs(logs)
token = get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)
# count() of all logs for the last 5 minutes
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "time_series",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
aggregations = results[0]["aggregations"]
assert len(aggregations) == 1
series = aggregations[0]["series"]
assert len(series) == 1
values = series[0]["values"]
assert len(values) == 3
# The order of the values is not asserted here; every returned point must match one of the expected buckets
assert [
i
for i in values
if i
not in [
{
"timestamp": int(
(now - timedelta(minutes=3))
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"value": 29,
},
{
"timestamp": int(
(now - timedelta(minutes=2))
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"value": 23,
},
{
"timestamp": int(
(now - timedelta(minutes=1))
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"value": 17,
},
]
] == []
# count() of all logs where code.line = 7 for the last 5 minutes
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "time_series",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "code.line = 7"},
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
aggregations = results[0]["aggregations"]
assert len(aggregations) == 1
series = aggregations[0]["series"]
assert len(series) == 1
values = series[0]["values"]
assert len(values) == 3
# The order of the values is not asserted here; every returned point must match one of the expected buckets
assert [
i
for i in values
if i
not in [
{
"timestamp": int(
(now - timedelta(minutes=3))
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"value": 1,
},
{
"timestamp": int(
(now - timedelta(minutes=2))
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"value": 1,
},
{
"timestamp": int(
(now - timedelta(minutes=1))
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"value": 1,
},
]
] == []
# count() of all logs grouped by host.name for the last 5 minutes
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "time_series",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"groupBy": [
{
"name": "host.name",
"fieldDataType": "string",
"fieldContext": "resource",
}
],
"order": [
{"key": {"name": "host.name"}, "direction": "desc"}
],
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
aggregations = results[0]["aggregations"]
assert len(aggregations) == 1
series = aggregations[0]["series"]
assert len(series) == 2
# The series order matters here (ordered desc by host.name); individual bucket values are checked by membership
assert series[0]["labels"] == [
{
"key": {
"name": "host.name",
"signal": "",
"fieldContext": "",
"fieldDataType": "",
},
"value": "linux-001",
}
]
assert {
"timestamp": int(
(now - timedelta(minutes=3)).replace(second=0, microsecond=0).timestamp()
* 1000
),
"value": 14,
} in series[0]["values"]
assert {
"timestamp": int(
(now - timedelta(minutes=2)).replace(second=0, microsecond=0).timestamp()
* 1000
),
"value": 11,
} in series[0]["values"]
assert {
"timestamp": int(
(now - timedelta(minutes=1)).replace(second=0, microsecond=0).timestamp()
* 1000
),
"value": 8,
} in series[0]["values"]
assert series[1]["labels"] == [
{
"key": {
"name": "host.name",
"signal": "",
"fieldContext": "",
"fieldDataType": "",
},
"value": "linux-000",
}
]
assert {
"timestamp": int(
(now - timedelta(minutes=3)).replace(second=0, microsecond=0).timestamp()
* 1000
),
"value": 15,
} in series[1]["values"]
assert {
"timestamp": int(
(now - timedelta(minutes=2)).replace(second=0, microsecond=0).timestamp()
* 1000
),
"value": 12,
} in series[1]["values"]
assert {
"timestamp": int(
(now - timedelta(minutes=1)).replace(second=0, microsecond=0).timestamp()
* 1000
),
"value": 9,
} in series[1]["values"]