diff --git a/datastore/samples/snippets/noxfile.py b/datastore/samples/snippets/noxfile.py new file mode 100644 index 00000000000..69bcaf56de6 --- /dev/null +++ b/datastore/samples/snippets/noxfile.py @@ -0,0 +1,292 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] + +# Any default versions that should be ignored. 
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8") + else: + session.install("flake8", "flake8-annotations") + + args = FLAKE8_COMMON_ARGS + [ + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# format = isort + black +# + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + # check for presence of tests + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) + + if len(test_list) == 0: + print("No tests found, skipping directory.") + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/datastore/samples/snippets/noxfile_config.py b/datastore/samples/snippets/noxfile_config.py new file mode 100644 index 00000000000..7bf43541d12 --- /dev/null +++ b/datastore/samples/snippets/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. 
+ "envs": {}, +} diff --git a/datastore/samples/snippets/requirements-test.txt b/datastore/samples/snippets/requirements-test.txt new file mode 100644 index 00000000000..2a21e952015 --- /dev/null +++ b/datastore/samples/snippets/requirements-test.txt @@ -0,0 +1,7 @@ +backoff===1.11.1; python_version < "3.7" +backoff==2.2.1; python_version >= "3.7" +pytest===7.4.3; python_version == '3.7' +pytest===8.3.5; python_version == '3.8' +pytest===8.4.2; python_version == '3.9' +pytest==9.0.2; python_version >= '3.10' +flaky==3.8.1 diff --git a/datastore/samples/snippets/requirements.txt b/datastore/samples/snippets/requirements.txt new file mode 100644 index 00000000000..7852f23b24e --- /dev/null +++ b/datastore/samples/snippets/requirements.txt @@ -0,0 +1 @@ +google-cloud-datastore==2.23.0 \ No newline at end of file diff --git a/datastore/samples/snippets/schedule-export/README.md b/datastore/samples/snippets/schedule-export/README.md new file mode 100644 index 00000000000..a8501cddc34 --- /dev/null +++ b/datastore/samples/snippets/schedule-export/README.md @@ -0,0 +1,5 @@ +# Scheduling Datastore exports with Cloud Functions and Cloud Scheduler + +This sample application demonstrates how to schedule exports of your Datastore entities. To deploy this sample, see: + +[Scheduling exports](https://cloud.google.com/datastore/docs/schedule-export) diff --git a/datastore/samples/snippets/schedule-export/main.py b/datastore/samples/snippets/schedule-export/main.py new file mode 100644 index 00000000000..f91b1466913 --- /dev/null +++ b/datastore/samples/snippets/schedule-export/main.py @@ -0,0 +1,57 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import json +import os + +from google.cloud import datastore_admin_v1 + +project_id = os.environ.get("GCP_PROJECT") +client = datastore_admin_v1.DatastoreAdminClient() + + +def datastore_export(event, context): + """Triggers a Datastore export from a Cloud Scheduler job. + + Args: + event (dict): event[data] must contain a json object encoded in + base-64. Cloud Scheduler encodes payloads in base-64 by default. + Object must include a 'bucket' value and can include 'kinds' + and 'namespaceIds' values. + context (google.cloud.functions.Context): The Cloud Functions event + metadata. + """ + if "data" in event: + # Triggered via Cloud Scheduler, decode the inner data field of the json payload. + json_data = json.loads(base64.b64decode(event["data"]).decode("utf-8")) + else: + # Otherwise, for instance if triggered via the Cloud Console on a Cloud Function, the event is the data. 
+ json_data = event + + bucket = json_data["bucket"] + entity_filter = datastore_admin_v1.EntityFilter() + + if "kinds" in json_data: + entity_filter.kinds = json_data["kinds"] + + if "namespaceIds" in json_data: + entity_filter.namespace_ids = json_data["namespaceIds"] + + export_request = datastore_admin_v1.ExportEntitiesRequest( + project_id=project_id, output_url_prefix=bucket, entity_filter=entity_filter + ) + operation = client.export_entities(request=export_request) + response = operation.result() + print(response) diff --git a/datastore/samples/snippets/schedule-export/noxfile.py b/datastore/samples/snippets/schedule-export/noxfile.py new file mode 100644 index 00000000000..69bcaf56de6 --- /dev/null +++ b/datastore/samples/snippets/schedule-export/noxfile.py @@ -0,0 +1,292 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. 
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8") + else: + session.install("flake8", "flake8-annotations") + + args = FLAKE8_COMMON_ARGS + [ + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# format = isort + black +# + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + # check for presence of tests + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) + + if len(test_list) == 0: + print("No tests found, skipping directory.") + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/datastore/samples/snippets/schedule-export/noxfile_config.py b/datastore/samples/snippets/schedule-export/noxfile_config.py new file mode 100644 index 00000000000..7bf43541d12 --- /dev/null +++ b/datastore/samples/snippets/schedule-export/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. 
+ "envs": {}, +} diff --git a/datastore/samples/snippets/schedule-export/requirements-test.txt b/datastore/samples/snippets/schedule-export/requirements-test.txt new file mode 100644 index 00000000000..cb982446b31 --- /dev/null +++ b/datastore/samples/snippets/schedule-export/requirements-test.txt @@ -0,0 +1,2 @@ +pytest===8.4.2; python_version == '3.9' +pytest==9.0.2; python_version >= '3.10' diff --git a/datastore/samples/snippets/schedule-export/requirements.txt b/datastore/samples/snippets/schedule-export/requirements.txt new file mode 100644 index 00000000000..fa16c1e95ab --- /dev/null +++ b/datastore/samples/snippets/schedule-export/requirements.txt @@ -0,0 +1 @@ +google-cloud-datastore==2.23.0 diff --git a/datastore/samples/snippets/schedule-export/schedule_export_test.py b/datastore/samples/snippets/schedule-export/schedule_export_test.py new file mode 100644 index 00000000000..48d9147c923 --- /dev/null +++ b/datastore/samples/snippets/schedule-export/schedule_export_test.py @@ -0,0 +1,73 @@ +# Copyright 2019 Google LLC All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +from unittest.mock import Mock + +import main + +mock_context = Mock() +mock_context.event_id = "617187464135194" +mock_context.timestamp = "2020-04-15T22:09:03.761Z" + + +def test_datastore_export(capsys): + # Test an export without an entity filter + bucket = "gs://my-bucket" + json_string = '{{ "bucket": "{bucket}" }}'.format(bucket=bucket) + + # Encode data like Cloud Scheduler + data = bytes(json_string, "utf-8") + data_encoded = base64.b64encode(data) + event = {"data": data_encoded} + + # Mock the Datastore service + mockDatastore = Mock() + main.client = mockDatastore + + # Call tested function + main.datastore_export(event, mock_context) + out, err = capsys.readouterr() + export_args = mockDatastore.export_entities.call_args[1] + # Assert request includes test values + assert export_args["request"].output_url_prefix == bucket + + +def test_datastore_export_entity_filter(capsys): + # Test an export with an entity filter + bucket = "gs://my-bucket" + kinds = "Users,Tasks" + namespaceIds = "Customer831,Customer157" + json_string = '{{ "bucket": "{bucket}", "kinds": "{kinds}", "namespaceIds": "{namespaceIds}" }}'.format( + bucket=bucket, kinds=kinds, namespaceIds=namespaceIds + ) + + # Encode data like Cloud Scheduler + data = bytes(json_string, "utf-8") + data_encoded = base64.b64encode(data) + event = {"data": data_encoded} + + # Mock the Datastore service + mockDatastore = Mock() + main.client = mockDatastore + + # Call tested function + main.datastore_export(event, mock_context) + out, err = capsys.readouterr() + export_args = mockDatastore.export_entities.call_args[1] + # Assert request includes test values + + assert export_args["request"].output_url_prefix == bucket + assert export_args["request"].entity_filter.kinds == kinds + assert export_args["request"].entity_filter.namespace_ids == namespaceIds diff --git a/datastore/samples/snippets/snippets.py 
b/datastore/samples/snippets/snippets.py new file mode 100644 index 00000000000..1b86ba8b0cd --- /dev/null +++ b/datastore/samples/snippets/snippets.py @@ -0,0 +1,513 @@ +# Copyright 2022 Google, Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +from datetime import datetime, timedelta, timezone +from pprint import pprint +import time + +from google.cloud import datastore # noqa: I100 + + +def _preamble(): + # [START datastore_size_coloration_query] + from google.cloud import datastore + + # For help authenticating your client, visit + # https://cloud.google.com/docs/authentication/getting-started + client = datastore.Client() + + # [END datastore_size_coloration_query] + assert client is not None + + +def in_query(client): + # [START datastore_in_query] + query = client.query(kind="Task") + query.add_filter("tag", "IN", ["learn", "study"]) + # [END datastore_in_query] + + return list(query.fetch()) + + +def not_equals_query(client): + # [START datastore_not_equals_query] + query = client.query(kind="Task") + query.add_filter("category", "!=", "work") + # [END datastore_not_equals_query] + + return list(query.fetch()) + + +def not_in_query(client): + # [START datastore_not_in_query] + query = client.query(kind="Task") + query.add_filter("category", "NOT_IN", ["work", "chores", "school"]) + # [END datastore_not_in_query] + + return list(query.fetch()) + + +def query_with_readtime(client): + # [START datastore_stale_read] + # Create a read time of 15 seconds in the past + read_time = datetime.now(timezone.utc) - timedelta(seconds=15) + + # Fetch an entity with read_time + task_key = client.key("Task", "sampletask") + entity = client.get(task_key, read_time=read_time) + + # Query Task entities with read_time + query = client.query(kind="Task") + tasks = query.fetch(read_time=read_time, limit=10) + # [END datastore_stale_read] + + results = list(tasks) + results.append(entity) + + return results + + +def count_query_in_transaction(client): + # [START datastore_count_in_transaction] + task1 = datastore.Entity(client.key("Task", "task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + + task1["owner"] = "john" + task2["owner"] = "john" + + tasks = [task1, task2] + client.put_multi(tasks) + + with client.transaction() as transaction: + + tasks_of_john = client.query(kind="Task") + tasks_of_john.add_filter("owner", "=", "john") + total_tasks_query = client.aggregation_query(tasks_of_john) + + query_result = total_tasks_query.count(alias="tasks_count").fetch() + for task_result in query_result: + tasks_count = task_result[0] + if tasks_count.value < 2: + task3 = datastore.Entity(client.key("Task", "task3")) + task3["owner"] = "john" + transaction.put(task3) + tasks.append(task3) + else: + print(f"Found existing {tasks_count.value} tasks, rolling back") + client.entities_to_delete.extend(tasks) + raise ValueError("User 'John' cannot have more than 2 tasks") + # [END datastore_count_in_transaction] + + +def count_query_on_kind(client): + # [START datastore_count_on_kind] + 
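+    # Count every entity of kind "Task" with a server-side aggregation query;
+    # only the count, not the entities themselves, is returned to the client.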
task1 = datastore.Entity(client.key("Task", "task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + + tasks = [task1, task2] + client.put_multi(tasks) + all_tasks_query = client.query(kind="Task") + all_tasks_count_query = client.aggregation_query(all_tasks_query).count() + query_result = all_tasks_count_query.fetch() + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"Total tasks (accessible from default alias) is {aggregation.value}") + # [END datastore_count_on_kind] + return tasks + + +def count_query_with_limit(client): + # [START datastore_count_with_limit] + task1 = datastore.Entity(client.key("Task", "task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + task3 = datastore.Entity(client.key("Task", "task3")) + + tasks = [task1, task2, task3] + client.put_multi(tasks) + all_tasks_query = client.query(kind="Task") + all_tasks_count_query = client.aggregation_query(all_tasks_query).count() + query_result = all_tasks_count_query.fetch(limit=2) + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"We have at least {aggregation.value} tasks") + # [END datastore_count_with_limit] + return tasks + + +def count_query_property_filter(client): + # [START datastore_count_with_property_filter] + task1 = datastore.Entity(client.key("Task", "task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + task3 = datastore.Entity(client.key("Task", "task3")) + + task1["done"] = True + task2["done"] = False + task3["done"] = True + + tasks = [task1, task2, task3] + client.put_multi(tasks) + completed_tasks = client.query(kind="Task").add_filter("done", "=", True) + remaining_tasks = client.query(kind="Task").add_filter("done", "=", False) + + completed_tasks_query = client.aggregation_query(query=completed_tasks).count( + alias="total_completed_count" + ) + remaining_tasks_query = client.aggregation_query(query=remaining_tasks).count( + alias="total_remaining_count" + ) + + completed_query_result = completed_tasks_query.fetch() + for aggregation_results in completed_query_result: + for aggregation_result in aggregation_results: + if aggregation_result.alias == "total_completed_count": + print(f"Total completed tasks count is {aggregation_result.value}") + + remaining_query_result = remaining_tasks_query.fetch() + for aggregation_results in remaining_query_result: + for aggregation_result in aggregation_results: + if aggregation_result.alias == "total_remaining_count": + print(f"Total remaining tasks count is {aggregation_result.value}") + # [END datastore_count_with_property_filter] + return tasks + + +def count_query_with_stale_read(client): + + tasks = [task for task in client.query(kind="Task").fetch()] + client.delete_multi(tasks) # ensure the database is empty before starting + + # [START datastore_count_query_with_stale_read] + task1 = datastore.Entity(client.key("Task", "task1")) + task2 = datastore.Entity(client.key("Task", "task2")) + + # Saving two tasks + task1["done"] = True + task2["done"] = False + client.put_multi([task1, task2]) + time.sleep(10) + + past_timestamp = datetime.now( + timezone.utc + ) # we have two tasks in database at this time. 
+ time.sleep(10) + + # Saving third task + task3 = datastore.Entity(client.key("Task", "task3")) + task3["done"] = False + client.put(task3) + + all_tasks = client.query(kind="Task") + all_tasks_count = client.aggregation_query( + query=all_tasks, + ).count(alias="all_tasks_count") + + # Executing aggregation query + query_result = all_tasks_count.fetch() + for aggregation_results in query_result: + for aggregation_result in aggregation_results: + print(f"Latest tasks count is {aggregation_result.value}") + + # Executing aggregation query with past timestamp + tasks_in_past = client.aggregation_query(query=all_tasks).count( + alias="tasks_in_past" + ) + tasks_in_the_past_query_result = tasks_in_past.fetch(read_time=past_timestamp) + for aggregation_results in tasks_in_the_past_query_result: + for aggregation_result in aggregation_results: + print(f"Stale tasks count is {aggregation_result.value}") + # [END datastore_count_query_with_stale_read] + return [task1, task2, task3] + + +def sum_query_on_kind(client): + # [START datastore_sum_aggregation_query_on_kind] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute sum aggregation query + all_tasks_query = client.query(kind="Task") + all_tasks_sum_query = client.aggregation_query(all_tasks_query).sum("hours") + query_result = all_tasks_sum_query.fetch() + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"Total sum of hours in tasks is {aggregation.value}") + # [END datastore_sum_aggregation_query_on_kind] + return tasks + + +def sum_query_property_filter(client): + # [START datastore_sum_aggregation_query_with_filters] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + task1["done"] = True + task2["done"] = True + task3["done"] = False + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute sum aggregation query with filters + completed_tasks = client.query(kind="Task").add_filter("done", "=", True) + completed_tasks_query = client.aggregation_query(query=completed_tasks).sum( + property_ref="hours", alias="total_completed_sum_hours" + ) + + completed_query_result = completed_tasks_query.fetch() + for aggregation_results in completed_query_result: + for aggregation_result in aggregation_results: + if aggregation_result.alias == "total_completed_sum_hours": + print( + f"Total sum of hours in completed tasks is {aggregation_result.value}" + ) + # [END datastore_sum_aggregation_query_with_filters] + return tasks + + +def avg_query_on_kind(client): + # [START datastore_avg_aggregation_query_on_kind] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute average aggregation query + all_tasks_query = client.query(kind="Task") + all_tasks_avg_query = 
client.aggregation_query(all_tasks_query).avg("hours") + query_result = all_tasks_avg_query.fetch() + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"Total average of hours in tasks is {aggregation.value}") + # [END datastore_avg_aggregation_query_on_kind] + return tasks + + +def avg_query_property_filter(client): + # [START datastore_avg_aggregation_query_with_filters] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + task1["done"] = True + task2["done"] = True + task3["done"] = False + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute average aggregation query with filters + completed_tasks = client.query(kind="Task").add_filter("done", "=", True) + completed_tasks_query = client.aggregation_query(query=completed_tasks).avg( + property_ref="hours", alias="total_completed_avg_hours" + ) + + completed_query_result = completed_tasks_query.fetch() + for aggregation_results in completed_query_result: + for aggregation_result in aggregation_results: + if aggregation_result.alias == "total_completed_avg_hours": + print( + f"Total average of hours in completed tasks is {aggregation_result.value}" + ) + # [END datastore_avg_aggregation_query_with_filters] + return tasks + + +def multiple_aggregations_query(client): + # [START datastore_multiple_aggregation_in_structured_query] + # Set up sample entities + # Use incomplete key to auto-generate ID + task1 = datastore.Entity(client.key("Task")) + task2 = datastore.Entity(client.key("Task")) + task3 = datastore.Entity(client.key("Task")) + + task1["hours"] = 5 + task2["hours"] = 3 + task3["hours"] = 1 + + tasks = [task1, task2, task3] + client.put_multi(tasks) + + # Execute query with multiple aggregations + all_tasks_query = client.query(kind="Task") + aggregation_query = client.aggregation_query(all_tasks_query) + # Add aggregations + aggregation_query.add_aggregations( + [ + datastore.aggregation.CountAggregation(alias="count_aggregation"), + datastore.aggregation.SumAggregation( + property_ref="hours", alias="sum_aggregation" + ), + datastore.aggregation.AvgAggregation( + property_ref="hours", alias="avg_aggregation" + ), + ] + ) + + query_result = aggregation_query.fetch() + for aggregation_results in query_result: + for aggregation in aggregation_results: + print(f"{aggregation.alias} value is {aggregation.value}") + # [END datastore_multiple_aggregation_in_structured_query] + return tasks + + +def explain_analyze_entity(client): + # [START datastore_query_explain_analyze_entity] + # Build the query with explain_options + # analyze = true to get back the query stats, plan info, and query results + query = client.query( + kind="Task", explain_options=datastore.ExplainOptions(analyze=True) + ) + + # initiate the query + iterator = query.fetch() + + # explain_metrics is only available after query is completed + for task_result in iterator: + print(task_result) + + # get the plan summary + plan_summary = iterator.explain_metrics.plan_summary + print(f"Indexes used: {plan_summary.indexes_used}") + + # get the execution stats + execution_stats = iterator.explain_metrics.execution_stats + print(f"Results returned: {execution_stats.results_returned}") + print(f"Execution duration: {execution_stats.execution_duration}") + print(f"Read operations: 
{execution_stats.read_operations}") + print(f"Debug stats: {execution_stats.debug_stats}") + # [END datastore_query_explain_analyze_entity] + + +def explain_entity(client): + # [START datastore_query_explain_entity] + # Build the query with explain_options + # by default (analyze = false), only plan_summary property is available + query = client.query(kind="Task", explain_options=datastore.ExplainOptions()) + + # initiate the query + iterator = query.fetch() + + # get the plan summary + plan_summary = iterator.explain_metrics.plan_summary + print(f"Indexes used: {plan_summary.indexes_used}") + # [END datastore_query_explain_entity] + + +def explain_analyze_aggregation(client): + # [START datastore_query_explain_analyze_aggregation] + # Build the aggregation query with explain_options + # analzye = true to get back the query stats, plan info, and query results + all_tasks_query = client.query(kind="Task") + count_query = client.aggregation_query( + all_tasks_query, explain_options=datastore.ExplainOptions(analyze=True) + ).count() + + # initiate the query + iterator = count_query.fetch() + + # explain_metrics is only available after query is completed + for task_result in iterator: + print(task_result) + + # get the plan summary + plan_summary = iterator.explain_metrics.plan_summary + print(f"Indexes used: {plan_summary.indexes_used}") + + # get the execution stats + execution_stats = iterator.explain_metrics.execution_stats + print(f"Results returned: {execution_stats.results_returned}") + print(f"Execution duration: {execution_stats.execution_duration}") + print(f"Read operations: {execution_stats.read_operations}") + print(f"Debug stats: {execution_stats.debug_stats}") + # [END datastore_query_explain_analyze_aggregation] + + +def explain_aggregation(client): + # [START datastore_query_explain_aggregation] + # Build the aggregation query with explain_options + # by default (analyze = false), only plan_summary property is available + all_tasks_query = client.query(kind="Task") + count_query = client.aggregation_query( + all_tasks_query, explain_options=datastore.ExplainOptions() + ).count() + + # initiate the query + iterator = count_query.fetch() + + # get the plan summary + plan_summary = iterator.explain_metrics.plan_summary + print(f"Indexes used: {plan_summary.indexes_used}") + # [END datastore_query_explain_aggregation] + + +def main(project_id): + client = datastore.Client(project_id) + + for name, function in globals().items(): + if name in ( + "main", + "_preamble", + "defaultdict", + "datetime", + "timezone", + "timedelta", + ) or not callable(function): + continue + + print(name) + pprint(function(client)) + print("\n-----------------\n") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Demonstrates datastore API operations." + ) + parser.add_argument("project_id", help="Your cloud project ID.") + + args = parser.parse_args() + + main(args.project_id) diff --git a/datastore/samples/snippets/snippets_test.py b/datastore/samples/snippets/snippets_test.py new file mode 100644 index 00000000000..ae3b2948b34 --- /dev/null +++ b/datastore/samples/snippets/snippets_test.py @@ -0,0 +1,249 @@ +# Copyright 2022 Google, Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import backoff +import google.api_core.exceptions +from google.cloud import datastore +from google.cloud import datastore_admin_v1 +import pytest + +import snippets + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] + + +class CleanupClient(datastore.Client): + def __init__(self, *args, **kwargs): + super(CleanupClient, self).__init__(*args, **kwargs) + self.entities_to_delete = [] + self.keys_to_delete = [] + + def cleanup(self): + with self.batch(): + self.delete_multi( + list(set([x.key for x in self.entities_to_delete if x])) + + list(set(self.keys_to_delete)) + ) + + +@pytest.fixture +def client(): + client = CleanupClient(PROJECT) + yield client + client.cleanup() + + +@pytest.fixture(scope="session", autouse=True) +def setup_indexes(request): + # Set up required indexes + admin_client = datastore_admin_v1.DatastoreAdminClient() + + indexes = [] + done_property_index = datastore_admin_v1.Index.IndexedProperty( + name="done", direction=datastore_admin_v1.Index.Direction.ASCENDING + ) + hour_property_index = datastore_admin_v1.Index.IndexedProperty( + name="hours", direction=datastore_admin_v1.Index.Direction.ASCENDING + ) + done_hour_index = datastore_admin_v1.Index( + kind="Task", + ancestor=datastore_admin_v1.Index.AncestorMode.NONE, + properties=[done_property_index, hour_property_index], + ) + indexes.append(done_hour_index) + + for index in indexes: + request = datastore_admin_v1.CreateIndexRequest(project_id=PROJECT, index=index) + # Create the required index + # Dependant tests will fail until the index is ready + try: + admin_client.create_index(request) + # Pass if the index already exists + except (google.api_core.exceptions.AlreadyExists): + pass + + +@pytest.mark.flaky +class TestDatastoreSnippets: + # These tests mostly just test the absence of exceptions. 
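+    # Each test retries on AssertionError with exponential backoff (max_time=240)
+    # because query and aggregation results can lag recent writes and the composite
+    # index created in setup_indexes may still be building when the suite starts.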
+ + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_in_query(self, client): + tasks = snippets.in_query(client) + client.entities_to_delete.extend(tasks) + assert tasks is not None + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_not_equals_query(self, client): + tasks = snippets.not_equals_query(client) + client.entities_to_delete.extend(tasks) + assert tasks is not None + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_not_in_query(self, client): + tasks = snippets.not_in_query(client) + client.entities_to_delete.extend(tasks) + assert tasks is not None + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_query_with_readtime(self, client): + tasks = snippets.query_with_readtime(client) + client.entities_to_delete.extend(tasks) + assert tasks is not None + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_in_transaction(self, client): + with pytest.raises(ValueError) as excinfo: + snippets.count_query_in_transaction(client) + assert "User 'John' cannot have more than 2 tasks" in str(excinfo.value) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_on_kind(self, capsys, client): + tasks = snippets.count_query_on_kind(client) + captured = capsys.readouterr() + assert ( + captured.out.strip() == "Total tasks (accessible from default alias) is 2" + ) + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_with_limit(self, capsys, client): + tasks = snippets.count_query_with_limit(client) + captured = capsys.readouterr() + assert captured.out.strip() == "We have at least 2 tasks" + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_property_filter(self, capsys, client): + tasks = snippets.count_query_property_filter(client) + captured = capsys.readouterr() + + assert "Total completed tasks count is 2" in captured.out + assert "Total remaining tasks count is 1" in captured.out + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_count_query_with_stale_read(self, capsys, client): + tasks = snippets.count_query_with_stale_read(client) + captured = capsys.readouterr() + + assert "Latest tasks count is 3" in captured.out + assert "Stale tasks count is 2" in captured.out + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_sum_query_on_kind(self, capsys, client): + tasks = snippets.sum_query_on_kind(client) + captured = capsys.readouterr() + assert captured.out.strip() == "Total sum of hours in tasks is 9" + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_sum_query_property_filter(self, capsys, client): + tasks = snippets.sum_query_property_filter(client) + captured = capsys.readouterr() + assert captured.out.strip() == "Total sum of hours in completed tasks is 8" + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_avg_query_on_kind(self, capsys, client): + tasks = 
snippets.avg_query_on_kind(client) + captured = capsys.readouterr() + assert captured.out.strip() == "Total average of hours in tasks is 3.0" + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_avg_query_property_filter(self, capsys, client): + tasks = snippets.avg_query_property_filter(client) + captured = capsys.readouterr() + assert ( + captured.out.strip() == "Total average of hours in completed tasks is 4.0" + ) + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_multiple_aggregations_query(self, capsys, client): + tasks = snippets.multiple_aggregations_query(client) + captured = capsys.readouterr() + assert "avg_aggregation value is 3.0" in captured.out + assert "count_aggregation value is 3" in captured.out + assert "sum_aggregation value is 9" in captured.out + assert captured.err == "" + + client.entities_to_delete.extend(tasks) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_analyze_entity(self, capsys, client): + snippets.explain_analyze_entity(client) + captured = capsys.readouterr() + assert ( + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out + ) + assert "Results returned: 0" in captured.out + assert "Execution duration: 0:00" in captured.out + assert "Read operations: 0" in captured.out + assert "Debug stats: {" in captured.out + assert captured.err == "" + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_entity(self, capsys, client): + snippets.explain_entity(client) + captured = capsys.readouterr() + assert ( + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out + ) + assert captured.err == "" + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_analyze_aggregation(self, capsys, client): + snippets.explain_analyze_aggregation(client) + captured = capsys.readouterr() + assert ( + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out + ) + assert "Results returned: 1" in captured.out + assert "Execution duration: 0:00" in captured.out + assert "Read operations: 1" in captured.out + assert "Debug stats: {" in captured.out + assert captured.err == "" + + @backoff.on_exception(backoff.expo, AssertionError, max_time=240) + def test_explain_aggregation(self, capsys, client): + snippets.explain_aggregation(client) + captured = capsys.readouterr() + assert ( + "Indexes used: [{'properties': '(__name__ ASC)', 'query_scope': 'Collection group'}]" + in captured.out + ) + assert captured.err == ""
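A minimal usage sketch (not part of the diff; the project id and cleanup step are illustrative) showing how one of the new snippets can be exercised directly, in the same way snippets.py's main() drives every function:

    # Hypothetical direct run of a single snippet against your own project.
    from google.cloud import datastore

    import snippets

    client = datastore.Client(project="my-project-id")  # placeholder project id
    tasks = snippets.count_query_on_kind(client)  # prints the aggregated count
    client.delete_multi([task.key for task in tasks])  # remove the sample entities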