From a43a7e1eae1ef0921c72bd4c4ad0c22ddd756a74 Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 11:59:43 +0200
Subject: [PATCH 01/11] add client creation to README

---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index f548956..f139016 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,7 @@
 
 For more information check out the [official documentation](https://axiom.co/docs).
 
-## Usage
+## Quickstart
 
 Install using `pip`:
 
@@ -48,7 +48,7 @@ import axiom
 
 access_token = os.getenv("AXIOM_TOKEN")
 org_id = os.getenv("AXIOM_ORG_ID")
-
+client = axiom.Client(access_token, org_id)
 
 client.datasets.query(r"['my-dataset'] | where foo == 'bar' | limit 100")
 ```

From 821d4f4d49eaed53878a8dea7f733dba866d27c3 Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 12:21:23 +0200
Subject: [PATCH 02/11] add ingest example to README

---
 README.md | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/README.md b/README.md
index f139016..660adde 100644
--- a/README.md
+++ b/README.md
@@ -49,6 +49,16 @@ access_token = os.getenv("AXIOM_TOKEN")
 org_id = os.getenv("AXIOM_ORG_ID")
 
 client = axiom.Client(access_token, org_id)
+
+time = datetime.utcnow() - timedelta(hours=1)
+time_formatted = rfc3339.format(time)
+
+client.datasets.ingest_events(
+    dataset="my-dataset",
+    events=[
+        {"foo": "bar", "_time": time_formatted},
+        {"bar": "baz", "_time": time_formatted},
+    ])
 client.datasets.query(r"['my-dataset'] | where foo == 'bar' | limit 100")
 ```
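The ingest snippet added in patch 02 uses `datetime`, `timedelta`, and `rfc3339` without importing them. For reference, a self-contained version of the README example — a sketch that assumes the third-party `rfc3339` package (used by this series' own tests) is installed:

```py
import os
from datetime import datetime, timedelta

import rfc3339
import axiom

client = axiom.Client(os.getenv("AXIOM_TOKEN"), os.getenv("AXIOM_ORG_ID"))

# Axiom expects RFC 3339 timestamps in the reserved _time field
time = datetime.utcnow() - timedelta(hours=1)
time_formatted = rfc3339.format(time)

client.datasets.ingest_events(
    dataset="my-dataset",
    events=[
        {"foo": "bar", "_time": time_formatted},
        {"bar": "baz", "_time": time_formatted},
    ],
)
client.datasets.query(r"['my-dataset'] | where foo == 'bar' | limit 100")
```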
From ee61b2ffbfcd387a1336650751444619a8ef688e Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 12:34:33 +0200
Subject: [PATCH 03/11] fallback to env variables if values are not provided

---
 README.md       | 17 +++++++++++++----
 axiom/client.py | 14 ++++++++++++--
 2 files changed, 25 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index 660adde..c592db8 100644
--- a/README.md
+++ b/README.md
@@ -45,10 +45,7 @@ Create and use a client like this:
 import os
 import axiom
 
-access_token = os.getenv("AXIOM_TOKEN")
-org_id = os.getenv("AXIOM_ORG_ID")
-
-client = axiom.Client(access_token, org_id)
+client = axiom.Client()
 
 time = datetime.utcnow() - timedelta(hours=1)
 time_formatted = rfc3339.format(time)
@@ -62,6 +59,18 @@ client.datasets.ingest_events(
 client.datasets.query(r"['my-dataset'] | where foo == 'bar' | limit 100")
 ```
 
+You can also configure the client using options passed to the client constructor:
+
+```py
+import os
+import axiom
+
+access_token = os.getenv("AXIOM_TOKEN")
+org_id = os.getenv("AXIOM_ORG_ID")
+
+client = axiom.Client(access_token, org_id)
+```
+
 ## Contributing
 
 This project uses [Poetry](https://python-poetry.org) for dependency management

diff --git a/axiom/client.py b/axiom/client.py
index 128f4c7..d27e90b 100644
--- a/axiom/client.py
+++ b/axiom/client.py
@@ -2,6 +2,8 @@
 import ndjson
 import dacite
 import ujson
+import os
+from typing import Optional
 from logging import getLogger
 from dataclasses import dataclass, field
 from requests_toolbelt.sessions import BaseUrlSession
@@ -11,6 +13,7 @@
 from .users import UsersClient
 from .__init__ import __version__
 
+
 AXIOM_URL = "https://cloud.axiom.co"
 
@@ -48,7 +51,14 @@ class Client:  # pylint: disable=R0903
     datasets: DatasetsClient
     users: UsersClient
 
-    def __init__(self, token: str, org_id: str = None, url_base: str = AXIOM_URL):
+    def __init__(self, token: Optional[str] = None, org_id: Optional[str] = None, url_base: Optional[str] = None):
+        # fallback to env variables if token, org_id or url are not provided
+        if token is None:
+            token = os.getenv("AXIOM_TOKEN")
+        if org_id is None:
+            org_id = os.getenv("AXIOM_ORG_ID")
+        if url_base is None:
+            url_base = AXIOM_URL
         # Append /api/v1 to the url_base
         url_base = url_base.rstrip("/") + "/api/v1/"
 
@@ -71,7 +81,7 @@ def __init__(self, token: str, org_id: str = None, url_base: str = AXIOM_URL):
             }
         )
 
-        # if there is and organization id passed,
+        # if there is an organization id passed,
         # set it in the header
         if org_id:
             logger.info("found organization id: %s" % org_id)
             session.headers.update({"X-Axiom-Org-Id": org_id})
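With this change every constructor argument is optional: pass values explicitly, or export the variables and construct the client bare. A brief sketch of the two styles the README now documents (the token, org id, and self-hosted URL below are hypothetical placeholders):

```py
import axiom

# reads AXIOM_TOKEN and AXIOM_ORG_ID from the environment,
# and falls back to https://cloud.axiom.co for the URL
client = axiom.Client()

# explicit arguments take precedence over the environment
client = axiom.Client(
    "xaat-my-token",              # hypothetical token
    "my-org",                     # hypothetical org id
    "https://axiom.example.com",  # hypothetical self-hosted deployment
)
```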
From 6a5ab843aa78b578b2f3eec9e57b4fa3a67f207c Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 14:28:07 +0200
Subject: [PATCH 04/11] DX-383; move ingest & query methods to top class client

---
 axiom/client.py        | 267 ++++++++++++++++++++++++++++++++++++++---
 axiom/datasets.py      | 235 +----------------------------------
 tests/test_client.py   | 205 ++++++++++++++++++++++++++++++-
 tests/test_datasets.py | 218 ++------------------------------
 4 files changed, 466 insertions(+), 459 deletions(-)

diff --git a/axiom/client.py b/axiom/client.py
index d27e90b..f378068 100644
--- a/axiom/client.py
+++ b/axiom/client.py
@@ -1,15 +1,21 @@
 """Client provides an easy-to-use client library to connect to Axiom."""
 import ndjson
 import dacite
+import gzip
 import ujson
 import os
-from typing import Optional
+from .util import Util
+from enum import Enum
+from humps import decamelize
+from typing import Optional, List, Dict, Any
 from logging import getLogger
-from dataclasses import dataclass, field
+from dataclasses import dataclass, field, asdict
+from datetime import datetime
 from requests_toolbelt.sessions import BaseUrlSession
-from requests_toolbelt.utils.dump import dump_response, dump_all
+from requests_toolbelt.utils.dump import dump_response
 from requests.adapters import HTTPAdapter, Retry
-from .datasets import DatasetsClient, ContentType
+from .datasets import DatasetsClient
+from .query import QueryLegacy, QueryResult, QueryOptions, QueryLegacyResult, QueryKind
 from .users import UsersClient
 from .__init__ import __version__
 
@@ -24,6 +30,79 @@ class Error:
     error: str = field(default=None)
 
 
+@dataclass
+class IngestFailure:
+    """The ingestion failure of a single event"""
+
+    timestamp: datetime
+    error: str
+
+
+@dataclass
+class IngestStatus:
+    """The status after an event ingestion operation"""
+
+    ingested: int
+    failed: int
+    failures: List[IngestFailure]
+    processed_bytes: int
+    blocks_created: int
+    wal_length: int
+
+
+@dataclass
+class IngestOptions:
+    """IngestOptions specifies the optional parameters for the Ingest and
+    IngestEvents method of the Datasets service."""
+
+    # timestamp field defines a custom field to extract the ingestion timestamp
+    # from. Defaults to `_time`.
+    timestamp_field: str = field(default="_time")
+    # timestamp format defines a custom format for the TimestampField.
+    # The reference time is `Mon Jan 2 15:04:05 -0700 MST 2006`, as specified
+    # in https://pkg.go.dev/time/?tab=doc#Parse.
+    timestamp_format: str = field(default=None)
+    # CSV delimiter is the delimiter that separates CSV fields. Only valid when
+    # the content to be ingested is CSV formatted.
+    CSV_delimiter: str = field(default=None)
+
+
+class AplResultFormat(Enum):
+    """The result format of an APL query."""
+
+    Legacy = "legacy"
+
+
+class ContentType(Enum):
+    """ContentType describes the content type of the data to ingest."""
+
+    JSON = "application/json"
+    NDJSON = "application/x-ndjson"
+    CSV = "text/csv"
+
+
+class ContentEncoding(Enum):
+    """ContentEncoding describes the content encoding of the data to ingest."""
+
+    IDENTITY = "1"
+    GZIP = "gzip"
+
+
+class WrongQueryKindException(Exception):
+    pass
+
+
+@dataclass
+class AplOptions:
+    """AplOptions specifies the optional parameters for the apl query method."""
+
+    start_time: Optional[datetime] = field(default=None)
+    end_time: Optional[datetime] = field(default=None)
+    no_cache: bool = field(default=False)
+    save: bool = field(default=False)
+    format: AplResultFormat = field(default=AplResultFormat.Legacy)
+
+
 def raise_response_error(r):
     if r.status_code >= 400:
         print("==== Response Debugging ====")
@@ -48,30 +127,37 @@ class Client:  # pylint: disable=R0903
     datasets: DatasetsClient
     users: UsersClient
 
-    def __init__(self, token: Optional[str] = None, org_id: Optional[str] = None, url_base: Optional[str] = None):
+    def __init__(
+        self,
+        token: Optional[str] = None,
+        org_id: Optional[str] = None,
+        url_base: Optional[str] = None,
+    ):
         # fallback to env variables if token, org_id or url are not provided
         if token is None:
             token = os.getenv("AXIOM_TOKEN")
         if org_id is None:
             org_id = os.getenv("AXIOM_ORG_ID")
         if url_base is None:
             url_base = AXIOM_URL
         # Append /api/v1 to the url_base
         url_base = url_base.rstrip("/") + "/api/v1/"
 
-        logger = getLogger()
-        session = BaseUrlSession(url_base)
+        self.logger = getLogger()
+        self.session = BaseUrlSession(url_base)
         # set exponential retries
         retries = Retry(
             total=3, backoff_factor=2, status_forcelist=[500, 502, 503, 504]
         )
-        session.mount("http://", HTTPAdapter(max_retries=retries))
-        session.mount("https://", HTTPAdapter(max_retries=retries))
+        self.session.mount("http://", HTTPAdapter(max_retries=retries))
+        self.session.mount("https://", HTTPAdapter(max_retries=retries))
         # hook on responses, raise error when response is not successful
-        session.hooks = {"response": lambda r, *args, **kwargs: raise_response_error(r)}
-        session.headers.update(
+        self.session.hooks = {
+            "response": lambda r, *args, **kwargs: raise_response_error(r)
+        }
+        self.session.headers.update(
             {
                 "Authorization": "Bearer %s" % token,
                 # set a default Content-Type header, can be overridden by requests.
@@ -84,8 +170,159 @@
         # if there is an organization id passed,
         # set it in the header
         if org_id:
-            logger.info("found organization id: %s" % org_id)
-            session.headers.update({"X-Axiom-Org-Id": org_id})
+            self.logger.info("found organization id: %s" % org_id)
+            self.session.headers.update({"X-Axiom-Org-Id": org_id})
+
+        self.datasets = DatasetsClient(self.session, self.logger)
+        self.users = UsersClient(self.session)
+
+    def ingest(
+        self,
+        dataset: str,
+        payload: bytes,
+        contentType: ContentType,
+        enc: ContentEncoding,
+        opts: Optional[IngestOptions] = None,
+    ) -> IngestStatus:
+        """Ingest the events into the named dataset and returns the status."""
+        path = "datasets/%s/ingest" % dataset
+
+        # check if passed content type and encoding are correct
+        if not contentType:
+            raise ValueError("unknown content-type, choose one of json,x-ndjson or csv")
+
+        if not enc:
+            raise ValueError("unknown content-encoding")
+
+        # set headers
+        headers = {"Content-Type": contentType.value, "Content-Encoding": enc.value}
+        # prepare query params
+        params = self._prepare_ingest_options(opts)
+
+        # override the default header and set the value from the passed parameter
+        res = self.session.post(path, data=payload, headers=headers, params=params)
+        status_snake = decamelize(res.json())
+        return Util.from_dict(IngestStatus, status_snake)
+
+    def ingest_events(
+        self,
+        dataset: str,
+        events: List[dict],
+        opts: Optional[IngestOptions] = None,
+    ) -> IngestStatus:
+        """Ingest the events into the named dataset and returns the status."""
+        # encode request payload to NDJSON
+        content = ndjson.dumps(events).encode("UTF-8")
+        gzipped = gzip.compress(content)
+
+        return self.ingest(
+            dataset, gzipped, ContentType.NDJSON, ContentEncoding.GZIP, opts
+        )
+
+    def query_legacy(
+        self, id: str, query: QueryLegacy, opts: QueryOptions
+    ) -> QueryLegacyResult:
+        """Executes the given query on the dataset identified by its id."""
+        if not opts.saveAsKind or (opts.saveAsKind == QueryKind.APL):
+            raise WrongQueryKindException(
+                "invalid query kind %s: must be %s or %s"
+                % (opts.saveAsKind, QueryKind.ANALYTICS, QueryKind.STREAM)
+            )
+
+        path = "datasets/%s/query" % id
+        payload = ujson.dumps(asdict(query), default=Util.handle_json_serialization)
+        self.logger.debug("sending query %s" % payload)
+        params = self._prepare_query_options(opts)
+        res = self.session.post(path, data=payload, params=params)
+        result = Util.from_dict(QueryLegacyResult, res.json())
+        self.logger.debug(f"query result: {result}")
+        query_id = res.headers.get("X-Axiom-History-Query-Id")
+        self.logger.info(f"received query result with query_id: {query_id}")
+        result.savedQueryID = query_id
+        return result
+
+    def apl_query(self, apl: str, opts: AplOptions) -> QueryResult:
+        """Executes the given APL query."""
+        return self.query(apl, opts)
+
+    def query(self, apl: str, opts: AplOptions) -> QueryResult:
+        """Executes the given APL query."""
+        path = "datasets/_apl"
+        payload = ujson.dumps(
+            self._prepare_apl_payload(apl, opts),
+            default=Util.handle_json_serialization,
+        )
+        self.logger.debug("sending query %s" % payload)
+        params = self._prepare_apl_options(opts)
+        res = self.session.post(path, data=payload, params=params)
+        result = Util.from_dict(QueryResult, res.json())
+        self.logger.debug(f"apl query result: {result}")
+        query_id = res.headers.get("X-Axiom-History-Query-Id")
+        self.logger.info(f"received query result with query_id: {query_id}")
+        result.savedQueryID = query_id
+        return result
+
+    def _prepare_query_options(self, opts: QueryOptions) -> Dict[str, Any]:
+        """returns the query options as a Dict, handles any renaming for key fields."""
+        if opts is None:
+            return {}
+        params = {}
+        if opts.streamingDuration:
+            params["streaming-duration"] = (
+                opts.streamingDuration.seconds.__str__() + "s"
+            )
+        if opts.saveAsKind:
+            params["saveAsKind"] = opts.saveAsKind.value
+
+        params["nocache"] = opts.nocache.__str__()
+
+        return params
+
+    def _prepare_ingest_options(
+        self, opts: Optional[IngestOptions] = None
+    ) -> Dict[str, Any]:
+        """the query params for ingest api are expected in a format
+        that couldn't be defined as a variable name because it has a dash.
+        As a workaround, we create the params dict manually."""
+
+        if opts is None:
+            return {}
+
+        params = {}
+        if opts.timestamp_field:
+            params["timestamp-field"] = opts.timestamp_field
+        if opts.timestamp_format:
+            params["timestamp-format"] = opts.timestamp_format
+        if opts.CSV_delimiter:
+            params["csv-delimiter"] = opts.CSV_delimiter
+
+        return params
+
+    def _prepare_apl_options(self, opts: AplOptions) -> Dict[str, Any]:
+        """Prepare the apl query options for the request."""
+
+        if opts is None:
+            return {}
+
+        params = {}
+        if opts.no_cache:
+            params["nocache"] = opts.no_cache.__str__()
+        if opts.save:
+            params["save"] = opts.save
+        if opts.format:
+            params["format"] = opts.format.value
+
+        return params
+
+    def _prepare_apl_payload(self, apl: str, opts: AplOptions) -> Dict[str, Any]:
+        """Prepare the apl query options for the request."""
+
+        params = {}
+        params["apl"] = apl
+
+        if opts.start_time:
+            params["startTime"] = opts.start_time
+        if opts.end_time:
+            params["endTime"] = opts.end_time
 
-        self.datasets = DatasetsClient(session, logger)
-        self.users = UsersClient(session)
+        return params
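With ingestion now on the top-level client, the low-level `ingest` takes raw bytes plus explicit content type, encoding, and options, while `ingest_events` handles NDJSON encoding and gzip compression itself. A sketch of both calls — the dataset name and field values are made up, and the client is assumed to be configured via environment variables:

```py
import gzip
import ujson

import axiom
from axiom import ContentType, ContentEncoding, IngestOptions

client = axiom.Client()

# low-level: compress a JSON payload ourselves and describe it explicitly
payload = gzip.compress(ujson.dumps([{"foo": "bar"}]).encode())
status = client.ingest(
    "my-dataset",
    payload,
    contentType=ContentType.JSON,
    enc=ContentEncoding.GZIP,
    opts=IngestOptions(timestamp_field="_time"),
)

# high-level: the client NDJSON-encodes and gzips the events for us
status = client.ingest_events("my-dataset", [{"foo": "bar"}, {"bar": "baz"}])
print(status.ingested, status.failed)
```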
diff --git a/axiom/datasets.py b/axiom/datasets.py
index c08fcba..cb52577 100644
--- a/axiom/datasets.py
+++ b/axiom/datasets.py
@@ -1,73 +1,11 @@
 """This package provides dataset models and methods as well as a DatasetClient"""
-import csv
-import gzip
 import ujson
-import ndjson
-from enum import Enum
 from logging import Logger
-from humps import decamelize
 from requests import Session
-from typing import List, Dict, Optional
+from typing import List, Dict
 from dataclasses import dataclass, asdict, field
-from datetime import datetime, timedelta, timezone
-
+from datetime import datetime, timedelta
 from .util import Util
-from .query import QueryLegacy, QueryOptions, QueryKind, QueryResult
-from .query.result import QueryLegacyResult
-
-
-@dataclass
-class IngestFailure:
-    """The ingestion failure of a single event"""
-
-    timestamp: datetime
-    error: str
-
-
-@dataclass
-class IngestStatus:
-    """The status after an event ingestion operation"""
-
-    ingested: int
-    failed: int
-    failures: List[IngestFailure]
-    processed_bytes: int
-    blocks_created: int
-    wal_length: int
-
-
-@dataclass
-class IngestOptions:
-    """IngestOptions specifies the optional parameters for the Ingest and
-    IngestEvents method of the Datasets service."""
-
-    # timestamp field defines a custom field to extract the ingestion timestamp
-    # from. Defaults to `_time`.
-    timestamp_field: str = field(default="_time")
-    # timestamp format defines a custom format for the TimestampField.
-    # The reference time is `Mon Jan 2 15:04:05 -0700 MST 2006`, as specified
-    # in https://pkg.go.dev/time/?tab=doc#Parse.
-    timestamp_format: str = field(default=None)
-    # CSV delimiter is the delimiter that separates CSV fields. Only valid when
-    # the content to be ingested is CSV formatted.
-    CSV_delimiter: str = field(default=None)
-
-
-class AplResultFormat(Enum):
-    """The result format of an APL query."""
-
-    Legacy = "legacy"
-
-
-@dataclass
-class AplOptions:
-    """AplOptions specifies the optional parameters for the apl query method."""
-
-    start_time: Optional[datetime] = field(default=None)
-    end_time: Optional[datetime] = field(default=None)
-    no_cache: bool = field(default=False)
-    save: bool = field(default=False)
-    format: AplResultFormat = field(default=AplResultFormat.Legacy)
 
 
 @dataclass
@@ -96,21 +34,6 @@ class DatasetUpdateRequest:
     description: str
 
 
-class ContentType(Enum):
-    """ContentType describes the content type of the data to ingest."""
-
-    JSON = "application/json"
-    NDJSON = "application/x-ndjson"
-    CSV = "text/csv"
-
-
-class ContentEncoding(Enum):
-    """ContentEncoding describes the content encoding of the data to ingest."""
-
-    IDENTITY = "1"
-    GZIP = "gzip"
-
-
 @dataclass
 class TrimRequest:
     """MaxDuration marks the oldest timestamp an event can have before getting deleted."""
@@ -118,10 +41,6 @@ class TrimRequest:
     maxDuration: str
 
 
-class WrongQueryKindException(Exception):
-    pass
-
-
 @dataclass
 class Field:
     """A field of a dataset"""
@@ -161,49 +80,6 @@ def __init__(self, session: Session, logger: Logger):
         self.session = session
         self.logger = logger
 
-    def ingest(
-        self,
-        dataset: str,
-        payload: bytes,
-        contentType: ContentType,
-        enc: ContentEncoding,
-        opts: IngestOptions = None,
-    ) -> IngestStatus:
-        """Ingest the events into the named dataset and returns the status."""
-        path = "datasets/%s/ingest" % dataset
-
-        # check if passed content type and encoding are correct
-        if not contentType:
-            raise ValueError("unknown content-type, choose one of json,x-ndjson or csv")
-
-        if not enc:
-            raise ValueError("unknown content-encoding")
-
-        # set headers
-        headers = {"Content-Type": contentType.value, "Content-Encoding": enc.value}
-        # prepare query params
-        params = self._prepare_ingest_options(opts)
-
-        # override the default header and set the value from the passed parameter
-        res = self.session.post(path, data=payload, headers=headers, params=params)
-        status_snake = decamelize(res.json())
-        return Util.from_dict(IngestStatus, status_snake)
-
-    def ingest_events(
-        self,
-        dataset: str,
-        events: List[dict],
-        opts: IngestOptions = None,
-    ) -> IngestStatus:
-        """Ingest the events into the named dataset and returns the status."""
-        path = "datasets/%s/ingest" % dataset
-
-        # encode request payload to NDJSON
-        content = ndjson.dumps(events).encode("UTF-8")
-        gzipped = gzip.compress(content)
-
-        return self.ingest(dataset, gzipped, ContentType.NDJSON, ContentEncoding.GZIP)
-
     def get(self, id: str) -> Dataset:
         """Get a dataset by id."""
         path = "datasets/%s" % id
@@ -244,50 +120,6 @@ def delete(self, id: str):
         path = "datasets/%s" % id
         self.session.delete(path)
 
-    def query_legacy(
-        self, id: str, query: QueryLegacy, opts: QueryOptions
-    ) -> QueryLegacyResult:
-        """Executes the given query on the dataset identified by its id."""
-        if not opts.saveAsKind or (opts.saveAsKind == QueryKind.APL):
-            raise WrongQueryKindException(
-                "invalid query kind %s: must be %s or %s"
-                % (opts.saveAsKind, QueryKind.ANALYTICS, QueryKind.STREAM)
-            )
-
-        path = "datasets/%s/query" % id
-        payload = ujson.dumps(asdict(query), default=Util.handle_json_serialization)
-        self.logger.debug("sending query %s" % payload)
-        params = self._prepare_query_options(opts)
-        res = self.session.post(path, data=payload, params=params)
-        result = Util.from_dict(QueryLegacyResult, res.json())
-        self.logger.debug(f"query result: {result}")
-        query_id = res.headers.get("X-Axiom-History-Query-Id")
-        self.logger.info(f"received query result with query_id: {query_id}")
-        result.savedQueryID = query_id
-        return result
-
-    def apl_query(self, apl: str, opts: AplOptions) -> QueryResult:
-        """Executes the given apl query on the dataset identified by its id."""
-        return self.query(apl, opts)
-
-    def query(self, apl: str, opts: AplOptions) -> QueryResult:
-        """Executes the given apl query on the dataset identified by its id."""
-
-        path = "datasets/_apl"
-        payload = ujson.dumps(
-            self._prepare_apl_payload(apl, opts),
-            default=Util.handle_json_serialization,
-        )
-        self.logger.debug("sending query %s" % payload)
-        params = self._prepare_apl_options(opts)
-        res = self.session.post(path, data=payload, params=params)
-        result = Util.from_dict(QueryResult, res.json())
-        self.logger.debug(f"apl query result: {result}")
-        query_id = res.headers.get("X-Axiom-History-Query-Id")
-        self.logger.info(f"received query result with query_id: {query_id}")
-        result.savedQueryID = query_id
-        return result
-
     def trim(self, id: str, maxDuration: timedelta):
         """
         Trim the dataset identified by its id to a given length. The max duration
@@ -299,69 +131,6 @@ def trim(self, id: str, maxDuration: timedelta):
         req = TrimRequest(f"{maxDuration.seconds}s")
         self.session.post(path, data=ujson.dumps(asdict(req)))
 
-    def _prepare_apl_options(self, opts: AplOptions) -> Dict[str, any]:
-        """Prepare the apl query options for the request."""
-
-        if opts is None:
-            return {}
-
-        params = {}
-        if opts.no_cache:
-            params["nocache"] = opts.no_cache.__str__()
-        if opts.save:
-            params["save"] = opts.save
-        if opts.format:
-            params["format"] = opts.format.value
-
-        return params
-
-    def _prepare_apl_payload(self, apl: str, opts: AplOptions) -> Dict[str, any]:
-        """Prepare the apl query options for the request."""
-
-        params = {}
-        params["apl"] = apl
-
-        if opts.start_time:
-            params["startTime"] = opts.start_time
-        if opts.end_time:
-            params["endTime"] = opts.end_time
-
-        return params
-
-    def _prepare_ingest_options(self, opts: IngestOptions) -> Dict[str, any]:
-        """the query params for ingest api are expected in a format
-        that couldn't be defined as a variable name because it has a dash.
-        As a work around, we create the params dict manually."""
-
-        if opts is None:
-            return {}
-
-        params = {}
-        if opts.timestamp_field:
-            params["timestamp-field"] = opts.timestamp_field
-        if opts.timestamp_format:
-            params["timestamp-format"] = opts.timestamp_format
-        if opts.CSV_delimiter:
-            params["csv-delimiter"] = opts.CSV_delimiter
-
-        return params
-
-    def _prepare_query_options(self, opts: QueryOptions) -> Dict[str, any]:
-        """returns the query options as a Dict, handles any renaming for key fields."""
-        if opts is None:
-            return {}
-        params = {}
-        if opts.streamingDuration:
-            params["streaming-duration"] = (
-                opts.streamingDuration.seconds.__str__() + "s"
-            )
-        if opts.saveAsKind:
-            params["saveAsKind"] = opts.saveAsKind.value
-
-        params["nocache"] = opts.nocache.__str__()
-
-        return params
-
     def info(self, id: str) -> DatasetInfo:
         """Returns the info about a dataset."""
         path = "datasets/%s/info" % id

diff --git a/tests/test_client.py b/tests/test_client.py
index cddb5d6..b84b615 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -1,7 +1,34 @@
+"""This module contains the tests for the axiom client."""
 import os
 import unittest
+import gzip
+import ujson
+import rfc3339
 import responses
-from axiom import Client
+from logging import getLogger
+from datetime import datetime, timedelta
+from .helpers import get_random_name
+from axiom import (
+    Client,
+    AplOptions,
+    AplResultFormat,
+    ContentEncoding,
+    ContentType,
+    IngestOptions,
+    WrongQueryKindException,
+)
+from axiom.query import (
+    QueryLegacy,
+    QueryOptions,
+    QueryKind,
+    Filter,
+    Order,
+    VirtualField,
+    Projection,
+    FilterOperation,
+    Aggregation,
+    AggregationOperation,
+)
 
 
 class TestClient(unittest.TestCase):
@@ -10,11 +37,41 @@ class TestClient(unittest.TestCase):
 
     @classmethod
     def setUpClass(cls):
+        cls.logger = getLogger()
         cls.client = Client(
             os.getenv("AXIOM_TOKEN"),
             os.getenv("AXIOM_ORG_ID"),
             os.getenv("AXIOM_URL"),
         )
+        cls.dataset_name = get_random_name()
+        cls.logger.info(f"generated random dataset name is: {cls.dataset_name}")
+        events_time_format = "%d/%b/%Y:%H:%M:%S +0000"
+        # create events to ingest and query
+        time = datetime.utcnow() - timedelta(minutes=1)
+        time_formatted = time.strftime(events_time_format)
+        cls.logger.info(f"time_formatted: {time_formatted}")
+        cls.events = [
+            {
+                "_time": time_formatted,
+                "remote_ip": "93.180.71.3",
+                "remote_user": "-",
+                "request": "GET /downloads/product_1 HTTP/1.1",
+                "response": 304,
+                "bytes": 0,
+                "referrer": "-",
+                "agent": "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)",
+            },
+            {
+                "_time": time_formatted,
+                "remote_ip": "93.180.71.3",
+                "remote_user": "-",
+                "request": "GET /downloads/product_1 HTTP/1.1",
+                "response": 304,
+                "bytes": 0,
+                "referrer": "-",
+                "agent": "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)",
+            },
+        ]
 
     @responses.activate
     def test_retries(self):
@@ -30,3 +87,149 @@ def test_retries(self):
 
         resp = self.client.datasets.get("test")
         assert len(responses.calls) == 3
+
+    def test_step002_ingest(self):
+        """Tests the ingest endpoint"""
+        data: bytes = ujson.dumps(self.events).encode()
+        payload = gzip.compress(data)
+        opts = IngestOptions(
+            "_time",
+            "2/Jan/2006:15:04:05 +0000",
+            # CSV_delimiter obviously not valid for JSON, but perfectly fine to test for its presence in this test.
+            ";",
+        )
+        res = self.client.ingest(
+            self.dataset_name,
+            payload=payload,
+            contentType=ContentType.JSON,
+            enc=ContentEncoding.GZIP,
+            opts=opts,
+        )
+        self.logger.debug(res)
+
+        assert (
+            res.ingested == 2
+        ), f"expected ingested count to equal 2, found {res.ingested}"
+        self.logger.info("ingested 2 events successfully.")
+
+    def test_step003_ingest_events(self):
+        """Tests the ingest_events method"""
+        time = datetime.utcnow() - timedelta(hours=1)
+        time_formatted = rfc3339.format(time)
+
+        res = self.client.ingest_events(
+            dataset=self.dataset_name,
+            events=[
+                {"foo": "bar", "_time": time_formatted},
+                {"bar": "baz", "_time": time_formatted},
+            ],
+        )
+        self.logger.debug(res)
+
+        assert (
+            res.ingested == 2
+        ), f"expected ingested count to equal 2, found {res.ingested}"
+
+    def test_step003_ingest_wrong_encoding(self):
+        try:
+            self.client.ingest("", "", ContentType.JSON, "")
+        except ValueError as err:
+            self.logger.debug(err)
+            self.logger.debug(
+                "Exception was raised for wrong content-encoding, as expected."
+            )
+            return
+
+        self.fail("error should have been thrown for wrong content-encoding")
+
+    def test_step003_ingest_wrong_content_type(self):
+        try:
+            self.client.ingest("", "", "", ContentEncoding.GZIP)
+        except ValueError as err:
+            self.logger.debug(err)
+            self.logger.debug(
+                "Exception was raised for wrong content-type, as expected."
+            )
+            return
+
+        self.fail("error should have been thrown for wrong content-type")
+
+    def test_step007_query(self):
+        """Test querying a dataset"""
+        # query the events we ingested in step2
+        startTime = datetime.utcnow() - timedelta(minutes=2)
+        endTime = datetime.utcnow()
+
+        q = QueryLegacy(startTime=startTime, endTime=endTime)
+        opts = QueryOptions(
+            streamingDuration=timedelta(seconds=60),
+            nocache=True,
+            saveAsKind=QueryKind.ANALYTICS,
+        )
+        qr = self.client.query_legacy(self.dataset_name, q, opts)
+
+        self.assertIsNotNone(qr.savedQueryID)
+        self.assertEqual(len(qr.matches), len(self.events))
+
+    def test_step007_apl_query(self):
+        """Test apl query"""
+        # query the events we ingested in step2
+        startTime = datetime.utcnow() - timedelta(minutes=2)
+        endTime = datetime.utcnow()
+
+        apl = "['%s']" % self.dataset_name
+        opts = AplOptions(
+            start_time=startTime,
+            end_time=endTime,
+            no_cache=True,
+            save=False,
+            format=AplResultFormat.Legacy,
+        )
+        qr = self.client.query(apl, opts)
+
+        self.assertEqual(len(qr.matches), len(self.events))
+
+    def test_step007_wrong_query_kind(self):
+        """Test wrong query kind"""
+        startTime = datetime.utcnow() - timedelta(minutes=2)
+        endTime = datetime.utcnow()
+        opts = QueryOptions(
+            streamingDuration=timedelta(seconds=60),
+            nocache=True,
+            saveAsKind=QueryKind.APL,
+        )
+        q = QueryLegacy(startTime, endTime)
+
+        try:
+            self.client.query_legacy(self.dataset_name, q, opts)
+        except WrongQueryKindException as err:
+            self.logger.info("passing kind apl to query raised exception as expected")
+            return
+
+        self.fail("was expecting WrongQueryKindException")
+
+    def test_step007_complex_query(self):
+        """Test complex query"""
+        startTime = datetime.utcnow() - timedelta(minutes=2)
+        endTime = datetime.utcnow()
+        aggregations = [
+            Aggregation(alias="event_count", op=AggregationOperation.COUNT, field="*")
+        ]
+        q = QueryLegacy(startTime, endTime, aggregations=aggregations)
+        q.groupBy = ["success", "remote_ip"]
+        q.filter = Filter(FilterOperation.EQUAL, "response", 304)
+        q.order = [
+            Order("success", True),
+            Order("remote_ip", False),
+        ]
+        q.virtualFields = [VirtualField("success", "response < 400")]
+        q.project = [Projection("remote_ip", "ip")]
+
+        res = self.client.query_legacy(self.dataset_name, q, QueryOptions())
+
+        # self.assertEqual(len(self.events), res.status.rowsExamined)
+        self.assertEqual(len(self.events), res.status.rowsMatched)
+
+        if len(res.buckets.totals):
+            agg = res.buckets.totals[0].aggregations[0]
+            self.assertEqual("event_count", agg.op)

diff --git a/tests/test_datasets.py b/tests/test_datasets.py
index 35141f9..e692484 100644
--- a/tests/test_datasets.py
+++ b/tests/test_datasets.py
@@ -1,49 +1,20 @@
 """This module contains the tests for the DatasetsClient."""
 import os
-import gzip
-import ujson
+
 import unittest
-import rfc3339
-import time
 from typing import List, Dict, Any
 from logging import getLogger
-from .helpers import get_random_name, parse_time
+from .helpers import get_random_name
 from axiom import (
     Client,
     DatasetCreateRequest,
     DatasetUpdateRequest,
-    ContentEncoding,
-    ContentType,
-    IngestOptions,
-    WrongQueryKindException,
-)
-from axiom.query import (
-    QueryResult,
-    QueryLegacy,
-    QueryOptions,
-    QueryKind,
-    Filter,
-    Order,
-    VirtualField,
-    Projection,
-    FilterOperation,
-)
-from axiom.query.result import (
-    QueryLegacyResult,
-    QueryStatus,
-    Entry,
-    EntryGroup,
-    Timeseries,
-    Interval,
-)
-from axiom.datasets import (
-    AplOptions,
-    AplResultFormat,
 )
+
 from axiom.query.aggregation import Aggregation, AggregationOperation
 from requests.exceptions import HTTPError
-from datetime import datetime, timedelta
+from datetime import timedelta
 
 
 class TestDatasets(unittest.TestCase):
@@ -60,33 +31,6 @@ def setUpClass(cls):
         cls.dataset_name = get_random_name()
         cls.logger.info(f"generated random dataset name is: {cls.dataset_name}")
 
-        # create events to ingest and query
-        # cls.events_time_format = "%d/%b/%Y:%H:%M:%S +0000"
-        time = datetime.utcnow() - timedelta(minutes=1)
-        time_formatted = time.strftime(cls.events_time_format)
-        cls.logger.info(f"time_formatted: {time_formatted}")
-        cls.events = [
-            {
-                "_time": time_formatted,
-                "remote_ip": "93.180.71.3",
-                "remote_user": "-",
-                "request": "GET /downloads/product_1 HTTP/1.1",
-                "response": 304,
-                "bytes": 0,
-                "referrer": "-",
-                "agent": "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)",
-            },
-            {
-                "_time": time_formatted,
-                "remote_ip": "93.180.71.3",
-                "remote_user": "-",
-                "request": "GET /downloads/product_1 HTTP/1.1",
-                "response": 304,
-                "bytes": 0,
-                "referrer": "-",
-                "agent": "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)",
-            },
-        ]
         cls.client = Client(
             os.getenv("AXIOM_TOKEN"),
             os.getenv("AXIOM_ORG_ID"),
@@ -103,174 +47,28 @@ def test_step001_create(self):
         self.logger.debug(res)
         assert res.name == self.dataset_name
 
-    def test_step002_ingest(self):
-        """Tests the ingest endpoint"""
-        data: bytes = ujson.dumps(self.events).encode()
-        payload = gzip.compress(data)
-        opts = IngestOptions(
-            "_time",
-            "2/Jan/2006:15:04:05 +0000",
-            # CSV_delimiter obviously not valid for JSON, but perfectly fine to test for its presence in this test.
-            ";",
-        )
-        res = self.client.datasets.ingest(
-            self.dataset_name,
-            payload=payload,
-            contentType=ContentType.JSON,
-            enc=ContentEncoding.GZIP,
-            opts=opts,
-        )
-        self.logger.debug(res)
-
-        assert (
-            res.ingested == 2
-        ), f"expected ingested count to equal 2, found {res.ingested}"
-        self.logger.info("ingested 2 events successfully.")
-
-    def test_step003_ingest_events(self):
-        """Tests the ingest_events method"""
-        time = datetime.utcnow() - timedelta(hours=1)
-        time_formatted = rfc3339.format(time)
-
-        res = self.client.datasets.ingest_events(
-            dataset=self.dataset_name,
-            events=[
-                {"foo": "bar", "_time": time_formatted},
-                {"bar": "baz", "_time": time_formatted},
-            ],
-        )
-        self.logger.debug(res)
-
-        assert (
-            res.ingested == 2
-        ), f"expected ingested count to equal 2, found {res.ingested}"
-
-    def test_step003_ingest_wrong_encoding(self):
-        try:
-            self.client.datasets.ingest("", "", ContentType.JSON, "")
-        except ValueError as err:
-            self.logger.debug(err)
-            self.logger.debug(
-                "Exception was raised for wrong content-encoding, as expected."
-            )
-            return
-
-        self.fail("error should have been thrown for wrong content-encoding")
-
-    def test_step003_ingest_wrong_content_type(self):
-        try:
-            self.client.datasets.ingest("", "", "", ContentEncoding.GZIP)
-        except ValueError as err:
-            self.logger.debug(err)
-            self.logger.debug(
-                "Exception was raised for wrong content-type, as expected."
-            )
-            return
-
-        self.fail("error should have been thrown for wrong content-type")
-
-    def test_step004_get(self):
+    def test_step002_get(self):
         """Tests get dataset endpoint"""
         dataset = self.client.datasets.get(self.dataset_name)
         self.logger.debug(dataset)
 
         assert dataset.name == self.dataset_name
 
-    def test_step005_list(self):
+    def test_step003_list(self):
         """Tests list datasets endpoint"""
         datasets = self.client.datasets.get_list()
         self.logger.debug(datasets)
 
         assert len(datasets) > 0
 
-    def test_step006_update(self):
+    def test_step004_update(self):
         """Tests update dataset endpoint"""
         updateReq = DatasetUpdateRequest("updated name through test")
         ds = self.client.datasets.update(self.dataset_name, updateReq)
 
         assert ds.description == updateReq.description
 
-    def test_step007_query(self):
-        """Test querying a dataset"""
-        # query the events we ingested in step2
-        startTime = datetime.utcnow() - timedelta(minutes=2)
-        endTime = datetime.utcnow()
-
-        q = QueryLegacy(startTime=startTime, endTime=endTime)
-        opts = QueryOptions(
-            streamingDuration=timedelta(seconds=60),
-            nocache=True,
-            saveAsKind=QueryKind.ANALYTICS,
-        )
-        qr = self.client.datasets.query_legacy(self.dataset_name, q, opts)
-
-        self.assertIsNotNone(qr.savedQueryID)
-        self.assertEqual(len(qr.matches), len(self.events))
-
-    def test_step007_apl_query(self):
-        """Test apl query"""
-        # query the events we ingested in step2
-        startTime = datetime.utcnow() - timedelta(minutes=2)
-        endTime = datetime.utcnow()
-
-        apl = "['%s']" % self.dataset_name
-        opts = AplOptions(
-            start_time=startTime,
-            end_time=endTime,
-            no_cache=True,
-            save=False,
-            format=AplResultFormat.Legacy,
-        )
-        qr = self.client.datasets.query(apl, opts)
-
-        self.assertEqual(len(qr.matches), len(self.events))
-
-    def test_step007_wrong_query_kind(self):
-        """Test wrong query kind"""
-        startTime = datetime.utcnow() - timedelta(minutes=2)
-        endTime = datetime.utcnow()
-        opts = QueryOptions(
-            streamingDuration=timedelta(seconds=60),
-            nocache=True,
-            saveAsKind=QueryKind.APL,
-        )
-        q = QueryLegacy(startTime, endTime)
-
-        try:
-            self.client.datasets.query_legacy(self.dataset_name, q, opts)
-        except WrongQueryKindException as err:
-            self.logger.info("passing kind apl to query raised exception as expected")
-            return
-
-        self.fail("was excepting WrongQueryKindException")
-
-    def test_step007_complex_query(self):
-        """Test complex query"""
-        startTime = datetime.utcnow() - timedelta(minutes=2)
-        endTime = datetime.utcnow()
-        aggregations = [
-            Aggregation(alias="event_count", op=AggregationOperation.COUNT, field="*")
-        ]
-        q = QueryLegacy(startTime, endTime, aggregations=aggregations)
-        q.groupBy = ["success", "remote_ip"]
-        q.filter = Filter(FilterOperation.EQUAL, "response", 304)
-        q.order = [
-            Order("success", True),
-            Order("remote_ip", False),
-        ]
-        q.virtualFields = [VirtualField("success", "response < 400")]
-        q.project = [Projection("remote_ip", "ip")]
-
-        res = self.client.datasets.query_legacy(self.dataset_name, q, QueryOptions())
-
-        # self.assertEqual(len(self.events), res.status.rowsExamined)
-        self.assertEqual(len(self.events), res.status.rowsMatched)
-
-        if len(res.buckets.totals):
-            agg = res.buckets.totals[0].aggregations[0]
-            self.assertEqual("event_count", agg.op)
-
-    def test_step009_trim(self):
+    def test_step005_trim(self):
         """Tests dataset trim endpoint"""
         self.client.datasets.trim(self.dataset_name, timedelta(seconds=1))

From 3cada129586eeecb5fe0aa1ccc0f10cc8ef62d96 Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 14:44:54 +0200
Subject: [PATCH 05/11] dx-383: fix logging class to use ingest from top-level client

---
 axiom/logging.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/axiom/logging.py b/axiom/logging.py
index 385bff1..e46c34f 100644
--- a/axiom/logging.py
+++ b/axiom/logging.py
@@ -21,4 +21,4 @@ def __init__(self, client: Client, dataset: str, level=NOTSET):
 
     def emit(self, record):
         # FIXME: Don't do an ingest call for every event
-        self.client.datasets.ingest_events(self.dataset, [record.__dict__])
+        self.client.ingest_events(self.dataset, [record.__dict__])
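Patch 05 is the one-line follow-up that keeps the bundled logging handler on the new top-level `ingest_events`. A sketch of attaching it to a standard logger — the `AxiomHandler` class name is an assumption, since the diff shows only the handler's `__init__` signature and `emit` body:

```py
import logging

import axiom
from axiom.logging import AxiomHandler  # name assumed; not shown in the diff

client = axiom.Client()  # configured via environment variables

handler = AxiomHandler(client, "my-dataset")
logger = logging.getLogger("my-app")
logger.addHandler(handler)

# emit() ingests each record's __dict__ as an event into 'my-dataset'
logger.warning("something happened")
```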
From b37aa25c7a256e205b8e5bc0ba558e636c4b743d Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 14:57:32 +0200
Subject: [PATCH 06/11] create and delete dataset for TestClient

---
 tests/test_client.py   | 38 ++++++++++++++++++++++++++++++------
 tests/test_datasets.py |  9 +++------
 2 files changed, 35 insertions(+), 12 deletions(-)

diff --git a/tests/test_client.py b/tests/test_client.py
index b84b615..d2adaaf 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -8,6 +8,7 @@
 from logging import getLogger
 from datetime import datetime, timedelta
 from .helpers import get_random_name
+from requests.exceptions import HTTPError
 from axiom import (
     Client,
     AplOptions,
@@ -16,6 +17,7 @@
     ContentType,
     IngestOptions,
     WrongQueryKindException,
+    DatasetCreateRequest,
 )
 from axiom.query import (
     QueryLegacy,
@@ -72,6 +74,12 @@ def setUpClass(cls):
                 "agent": "Debian APT-HTTP/1.3 (0.8.16~exp12ubuntu10.21)",
             },
         ]
+        # create dataset to test the client
+        req = DatasetCreateRequest(
+            name=cls.dataset_name,
+            description="create a dataset to test the python client",
+        )
+        cls.client.datasets.create(req)
 
     @responses.activate
     def test_retries(self):
@@ -88,7 +96,7 @@ def test_retries(self):
         resp = self.client.datasets.get("test")
         assert len(responses.calls) == 3
 
-    def test_step002_ingest(self):
+    def test_step001_ingest(self):
         """Tests the ingest endpoint"""
         data: bytes = ujson.dumps(self.events).encode()
         payload = gzip.compress(data)
@@ -112,7 +120,7 @@ def test_step001_ingest(self):
         ), f"expected ingested count to equal 2, found {res.ingested}"
         self.logger.info("ingested 2 events successfully.")
 
-    def test_step003_ingest_events(self):
+    def test_step002_ingest_events(self):
         """Tests the ingest_events method"""
         time = datetime.utcnow() - timedelta(hours=1)
         time_formatted = rfc3339.format(time)
@@ -154,7 +162,7 @@ def test_step003_ingest_wrong_content_type(self):
 
         self.fail("error should have been thrown for wrong content-type")
 
-    def test_step007_query(self):
+    def test_step004_query(self):
         """Test querying a dataset"""
         # query the events we ingested in step2
         startTime = datetime.utcnow() - timedelta(minutes=2)
@@ -171,7 +179,7 @@ def test_step004_query(self):
         self.assertIsNotNone(qr.savedQueryID)
         self.assertEqual(len(qr.matches), len(self.events))
 
-    def test_step007_apl_query(self):
+    def test_step005_apl_query(self):
         """Test apl query"""
         # query the events we ingested in step2
         startTime = datetime.utcnow() - timedelta(minutes=2)
@@ -189,7 +197,7 @@ def test_step005_apl_query(self):
 
         self.assertEqual(len(qr.matches), len(self.events))
 
-    def test_step007_wrong_query_kind(self):
+    def test_step005_wrong_query_kind(self):
         """Test wrong query kind"""
         startTime = datetime.utcnow() - timedelta(minutes=2)
         endTime = datetime.utcnow()
@@ -208,7 +216,7 @@ def test_step005_wrong_query_kind(self):
 
         self.fail("was expecting WrongQueryKindException")
 
-    def test_step007_complex_query(self):
+    def test_step005_complex_query(self):
         """Test complex query"""
         startTime = datetime.utcnow() - timedelta(minutes=2)
         endTime = datetime.utcnow()
@@ -233,3 +241,21 @@ def test_step005_complex_query(self):
         if len(res.buckets.totals):
             agg = res.buckets.totals[0].aggregations[0]
             self.assertEqual("event_count", agg.op)
+
+    @classmethod
+    def tearDownClass(cls):
+        """A teardown that checks if the dataset still exists and deletes it,
+        otherwise we might run into zombie datasets on failures."""
+        cls.logger.info("cleaning up after TestClient...")
+        try:
+            ds = cls.client.datasets.get(cls.dataset_name)
+            if ds:
+                cls.client.datasets.delete(cls.dataset_name)
+                cls.logger.info(
+                    "dataset (%s) was not deleted as part of the test, deleting it now."
+                    % cls.dataset_name
+                )
+        except HTTPError as err:
+            # nothing to do here, since the dataset doesn't exist
+            cls.logger.warning(err)
+        cls.logger.info("finish cleaning up after TestClient")

diff --git a/tests/test_datasets.py b/tests/test_datasets.py
index e692484..72ed220 100644
--- a/tests/test_datasets.py
+++ b/tests/test_datasets.py
@@ -4,6 +4,8 @@
 import unittest
 from typing import List, Dict, Any
 from logging import getLogger
+from requests.exceptions import HTTPError
+from datetime import timedelta
 from .helpers import get_random_name
 from axiom import (
     Client,
@@ -11,11 +13,6 @@
     DatasetUpdateRequest,
 )
 
-from axiom.query.aggregation import Aggregation, AggregationOperation
-
-from requests.exceptions import HTTPError
-from datetime import timedelta
-
 
 class TestDatasets(unittest.TestCase):
@@ -90,7 +87,7 @@ def test_step999_delete(self):
 
     @classmethod
     def tearDownClass(cls):
-        """A teardown that checks if the dataset still exists and deletes it would be great,
+        """A teardown that checks if the dataset still exists and deletes it,
         otherwise we might run into zombie datasets on failures."""
         cls.logger.info("cleaning up after TestDatasets...")
         try:

From 7c0d6945cbd31072383db9e76248a2b55c8bcf0e Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 15:04:39 +0200
Subject: [PATCH 07/11] Update README.md

Co-authored-by: Arne Bahlo
---
 README.md | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/README.md b/README.md
index c592db8..bc19e18 100644
--- a/README.md
+++ b/README.md
@@ -62,13 +62,9 @@ client.datasets.query(r"['my-dataset'] | where foo == 'bar' | limit 100")
 You can also configure the client using options passed to the client constructor:
 
 ```py
-import os
 import axiom
 
-access_token = os.getenv("AXIOM_TOKEN")
-org_id = os.getenv("AXIOM_ORG_ID")
-
-client = axiom.Client(access_token, org_id)
+client = axiom.Client("<api token>", "<org id>")
 ```
 
 ## Contributing

From 5406e2cd04f01a81c0303c1bd20fb61dd398d47b Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 15:04:51 +0200
Subject: [PATCH 08/11] Update axiom/client.py

Co-authored-by: Arne Bahlo
---
 axiom/client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/axiom/client.py b/axiom/client.py
index f378068..0d64efa 100644
--- a/axiom/client.py
+++ b/axiom/client.py
@@ -220,7 +220,7 @@ def ingest_events(
         )
 
     def query_legacy(
-        self, id: str, query: QueryLegacy, opts: QueryOptions
+        self, id: str, query: QueryLegacy, opts: QueryOptions = {}
     ) -> QueryLegacyResult:
         """Executes the given query on the dataset identified by its id."""
         if not opts.saveAsKind or (opts.saveAsKind == QueryKind.APL):

From faed16600e56ac07eef1becc9a49ea52a750d8ee Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 15:06:18 +0200
Subject: [PATCH 09/11] move custom client docs above example code block

---
 README.md | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index bc19e18..94a15fd 100644
--- a/README.md
+++ b/README.md
@@ -39,6 +39,14 @@ If you use the [Axiom CLI](https://github.com/axiomhq/cli), run `eval $(axiom co
 Otherwise create a personal token in [the Axiom settings](https://cloud.axiom.co/settings/profile) and export it as `AXIOM_TOKEN`. Set `AXIOM_ORG_ID` to the organization ID from the settings page of the organization you want to access.
 
+You can also configure the client using options passed to the client constructor:
+
+```py
+import axiom
+
+client = axiom.Client("<api token>", "<org id>")
+```
+
 Create and use a client like this:
 
 ```py
@@ -59,14 +67,6 @@ client.datasets.ingest_events(
 client.datasets.query(r"['my-dataset'] | where foo == 'bar' | limit 100")
 ```
 
-You can also configure the client using options passed to the client constructor:
-
-```py
-import axiom
-
-client = axiom.Client("<api token>", "<org id>")
-```
-
 ## Contributing
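Patch 10 below, among other typing cleanups, reverts patch 08's `opts: QueryOptions = {}` default: an empty dict is the wrong type for the annotation and, as a mutable default, would be shared across calls. A sketch of the conventional alternative, in case a default is ever wanted again — the `QueryOptions` here is a simplified stand-in for `axiom.query.QueryOptions`:

```py
from dataclasses import dataclass
from typing import Optional


@dataclass
class QueryOptions:  # simplified stand-in for axiom.query.QueryOptions
    nocache: bool = False


# default to None and normalize inside the function body, instead of
# using a mutable (and mistyped) {} as the default value
def query_legacy(dataset_id: str, opts: Optional[QueryOptions] = None) -> QueryOptions:
    if opts is None:
        opts = QueryOptions()
    return opts


assert query_legacy("my-dataset").nocache is False
```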
From b7af779374a15e098a433a78abeb30bc23082ca9 Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 15:20:39 +0200
Subject: [PATCH 10/11] improve optional params typings

---
 axiom/client.py | 30 +++++++++++++++-------------
 1 file changed, 17 insertions(+), 13 deletions(-)

diff --git a/axiom/client.py b/axiom/client.py
index 0d64efa..08c436b 100644
--- a/axiom/client.py
+++ b/axiom/client.py
@@ -25,9 +25,9 @@
 
 @dataclass
 class Error:
-    status: int = field(default=None)
-    message: str = field(default=None)
-    error: str = field(default=None)
+    status: Optional[int] = field(default=None)
+    message: Optional[str] = field(default=None)
+    error: Optional[str] = field(default=None)
 
 
 @dataclass
@@ -61,10 +61,10 @@ class IngestOptions:
     # timestamp format defines a custom format for the TimestampField.
     # The reference time is `Mon Jan 2 15:04:05 -0700 MST 2006`, as specified
     # in https://pkg.go.dev/time/?tab=doc#Parse.
-    timestamp_format: str = field(default=None)
+    timestamp_format: Optional[str] = field(default=None)
     # CSV delimiter is the delimiter that separates CSV fields. Only valid when
     # the content to be ingested is CSV formatted.
-    CSV_delimiter: str = field(default=None)
+    CSV_delimiter: Optional[str] = field(default=None)
 
 
 class AplResultFormat(Enum):
@@ -220,7 +220,7 @@ def ingest_events(
         )
 
     def query_legacy(
-        self, id: str, query: QueryLegacy, opts: QueryOptions = {}
+        self, id: str, query: QueryLegacy, opts: QueryOptions
     ) -> QueryLegacyResult:
         """Executes the given query on the dataset identified by its id."""
         if not opts.saveAsKind or (opts.saveAsKind == QueryKind.APL):
@@ -241,11 +241,11 @@ def query_legacy(
         result.savedQueryID = query_id
         return result
 
-    def apl_query(self, apl: str, opts: AplOptions) -> QueryResult:
+    def apl_query(self, apl: str, opts: Optional[AplOptions]) -> QueryResult:
         """Executes the given APL query."""
         return self.query(apl, opts)
 
-    def query(self, apl: str, opts: AplOptions) -> QueryResult:
+    def query(self, apl: str, opts: Optional[AplOptions]) -> QueryResult:
         """Executes the given APL query."""
         path = "datasets/_apl"
         payload = ujson.dumps(
@@ -278,9 +278,7 @@ def _prepare_query_options(self, opts: QueryOptions) -> Dict[str, Any]:
 
         return params
 
-    def _prepare_ingest_options(
-        self, opts: Optional[IngestOptions] = None
-    ) -> Dict[str, Any]:
+    def _prepare_ingest_options(self, opts: Optional[IngestOptions]) -> Dict[str, Any]:
         """the query params for ingest api are expected in a format
         that couldn't be defined as a variable name because it has a dash.
         As a workaround, we create the params dict manually."""
@@ -298,7 +296,7 @@ def _prepare_ingest_options(
 
         return params
 
-    def _prepare_apl_options(self, opts: AplOptions) -> Dict[str, Any]:
+    def _prepare_apl_options(self, opts: Optional[AplOptions]) -> Dict[str, Any]:
         """Prepare the apl query options for the request."""
 
         if opts is None:
@@ -314,9 +312,15 @@ def _prepare_apl_options(self, opts: Optional[AplOptions]) -> Dict[str, Any]:
 
         return params
 
-    def _prepare_apl_payload(self, apl: str, opts: AplOptions) -> Dict[str, Any]:
+    def _prepare_apl_payload(
+        self, apl: str, opts: Optional[AplOptions]
+    ) -> Dict[str, Any]:
         """Prepare the apl query options for the request."""
 
         params = {}
         params["apl"] = apl
+        # the APL string must always be sent; returning an empty dict here
+        # would drop the query itself when no options are passed
+        if opts is None:
+            return params
 
         if opts.start_time:
             params["startTime"] = opts.start_time

From 69863eeeb7a70b85cefea30081efc6778d0a2d9f Mon Sep 17 00:00:00 2001
From: Islam Shehata
Date: Thu, 24 Nov 2022 17:55:16 +0200
Subject: [PATCH 11/11] bump the version

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index b2805c0..20d0236 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "axiom-py"
-version = "0.1.0-beta.4"
+version = "0.1.0-beta.5"
 description = "Axiom API Python bindings."
 authors = ["Axiom, Inc."]
 license = "MIT"
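Taken together, the series ends with environment-based configuration, top-level ingest and query, and the 0.1.0-beta.5 release. A condensed sketch of the resulting API surface, using hypothetical dataset and field names:

```py
from datetime import datetime, timedelta

import rfc3339
import axiom
from axiom import AplOptions

client = axiom.Client()  # AXIOM_TOKEN / AXIOM_ORG_ID read from the environment

# ingest two events, timestamped a minute in the past (patch 04's top-level API)
time_formatted = rfc3339.format(datetime.utcnow() - timedelta(minutes=1))
client.ingest_events(
    "my-dataset",
    [
        {"foo": "bar", "_time": time_formatted},
        {"bar": "baz", "_time": time_formatted},
    ],
)

# query them back via APL on the top-level client
result = client.query(
    r"['my-dataset'] | where foo == 'bar' | limit 100",
    AplOptions(
        start_time=datetime.utcnow() - timedelta(minutes=5),
        end_time=datetime.utcnow(),
    ),
)
print(len(result.matches))
```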