diff --git a/.env_sample b/.env_sample
new file mode 100644
index 0000000..4a5577f
--- /dev/null
+++ b/.env_sample
@@ -0,0 +1 @@
+GCP_PROJECT_ID=
diff --git a/.github/workflows/bigquery-integration.yml b/.github/workflows/bigquery-integration.yml
new file mode 100644
index 0000000..42b1b1c
--- /dev/null
+++ b/.github/workflows/bigquery-integration.yml
@@ -0,0 +1,65 @@
+name: BigQuery Integration Tests
+
+on:
+  workflow_dispatch:
+    inputs:
+      test_suite:
+        type: choice
+        description: Test Suite to Run
+        default: "all"
+        options:
+          - all
+          - cohort_analysis
+          - composite_rank
+          - cross_shop
+          - customer_decision_hierarchy
+          - haversine
+          - hml_segmentation
+          - product_association
+          - revenue_tree
+          - rfm_segmentation
+          - segstats_segmentation
+          - threshold_segmentation
+
+permissions:
+  contents: read
+
+concurrency:
+  group: "bigquery-tests"
+  cancel-in-progress: true
+
+jobs:
+  integration-tests:
+    name: Run BigQuery Integration Tests
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+
+      - name: Install uv
+        run: |
+          pip install --upgrade pip
+          pip install uv==0.5.30
+
+      - name: Install Dependencies
+        run: |
+          uv sync
+          uv sync --group dev
+
+      - name: Set up GCP Authentication
+        uses: google-github-actions/auth@v2
+        with:
+          credentials_json: ${{ secrets.GCP_SA_KEY }}
+
+      - name: Run Integration Tests
+        env:
+          TEST_SUITE: ${{ inputs.test_suite }}
+          GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }}
+        run: |
+          uv run pytest tests/integration/bigquery -v \
+            $(if [ "$TEST_SUITE" != "all" ]; then echo "-k $TEST_SUITE"; fi)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 415c56d..1ae46c6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -25,7 +25,7 @@ repos:
       hooks:
         - id: pytest
           name: pytest
-          entry: uv run pytest --cov=pyretailscience --cov-report=xml --cov-branch tests
+          entry: uv run pytest --cov=pyretailscience --cov-report=xml --cov-branch tests --ignore=tests/integration/bigquery
           language: system
           types: [python]
           pass_filenames: false
diff --git a/README.md b/README.md
index 24ca462..635ff7e 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,4 @@
+
 ![PyRetailScience Logo](https://raw.githubusercontent.com/Data-Simply/pyretailscience/main/readme_assets/logo.png)
 
 # PyRetailScience
@@ -208,3 +209,68 @@ Built with expertise doing analytics and data science for scale-ups to multi-nat
 ## License
 
 This project is licensed under the Elastic License 2.0 - see the [LICENSE](LICENSE) file for details.
+
+## BigQuery Integration Tests
+
+### Overview
+
+The `tests/integration/bigquery` directory contains integration tests that verify the PyRetailScience
+analysis modules work correctly with Google BigQuery as a backend. These tests confirm that the
+Ibis-based code paths function correctly when connected to BigQuery.
+
+### Test Coverage
+
+The integration tests cover the following modules: cohort analysis, composite rank, cross shop, customer decision hierarchy, haversine distance, HML segmentation, product association, revenue tree, RFM segmentation, segment transaction stats, and threshold segmentation.
+
+### Prerequisites
+
+To run these tests, you need:
+
+1. Access to a Google Cloud Platform account
+2. A service account with BigQuery permissions
+3. The service account key JSON file
+4. The test dataset loaded in BigQuery (dataset: `test_data`, table: `transactions`)
+
+### Running the Tests
+
+#### Manual Setup
+
+- Set up authentication:
+
+```bash
+export GOOGLE_APPLICATION_CREDENTIALS=/path/to/your/service-account-key.json
+export GCP_PROJECT_ID=your-project-id
+```
+
+- Install dependencies:
+
+```bash
+uv sync
+```
+
+- Run the tests:
+
+```bash
+# Run all tests
+uv run pytest tests/integration/bigquery -v
+
+# Run a specific test module
+uv run pytest tests/integration/bigquery/test_cohort_analysis.py -v
+```
+
+### Using GitHub Actions
+
+These tests can be run manually in GitHub Actions via the "BigQuery Integration Tests" workflow. To run them:
+
+1. Go to the "Actions" tab in the GitHub repository
+2. Select the "BigQuery Integration Tests" workflow
+3. Click "Run workflow"
+4. Select the test suite to run from the dropdown (e.g., "cohort_analysis"), or leave it as "all"
+5. Click "Run workflow"
+
+#### Required Secrets
+
+To run the workflow in GitHub Actions, add these secrets to your repository:
+
+- `GCP_SA_KEY`: The entire JSON content of your GCP service account key file
+- `GCP_PROJECT_ID`: Your GCP project ID
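The "Ibis-based code paths" described in the README section above boil down to handing an Ibis table bound to BigQuery straight to a PyRetailScience analysis class. A minimal sketch of that flow, mirroring what `conftest.py` and `test_cohort_analysis.py` below do (illustrative only; it assumes credentials are configured, the `test_data.transactions` table from the prerequisites exists, and `your-project-id` is a placeholder):

```python
import ibis

from pyretailscience.analysis.cohort import CohortAnalysis

# Connect to BigQuery through Ibis, as the session-scoped fixture in conftest.py does.
con = ibis.bigquery.connect(project_id="your-project-id")  # placeholder project ID
transactions = con.table("test_data.transactions")

# Analysis classes accept the Ibis table directly, so the aggregation runs in BigQuery.
CohortAnalysis(
    df=transactions.limit(5000),
    aggregation_column="unit_spend",
    agg_func="sum",
    period="week",
    percentage=True,
)
```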
diff --git a/pyproject.toml b/pyproject.toml
index f8b11b9..2ec5965 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,11 +27,13 @@ name = "Murray Vanwyk"
 [dependency-groups]
 dev = [
     "freezegun>=1.5.1,<2",
+    "ibis-framework[bigquery]>=10.0.0,<11",
     "nbstripout>=0.7.1,<0.8",
     "pre-commit>=3.6.2,<4",
     "pytest-cov>=4.1.0,<5",
     "pytest-mock>=3.14.0,<4",
     "pytest>=8.0.0,<9",
+    "python-dotenv>=1.0.0,<2",
     "ruff>=0.9,<0.10",
     "tomlkit>=0.12,<1",
 ]
diff --git a/pyretailscience/segmentation/threshold.py b/pyretailscience/segmentation/threshold.py
index 933c0bb..a216fb0 100644
--- a/pyretailscience/segmentation/threshold.py
+++ b/pyretailscience/segmentation/threshold.py
@@ -83,14 +83,11 @@ def __init__(
         window = ibis.window(order_by=ibis.asc(df[value_col]))
         df = df.mutate(ptile=ibis.percent_rank().over(window))
 
-        case = ibis.case()
-
+        case_args = []
         for quantile, segment in zip(thresholds, segments, strict=True):
-            case = case.when(df["ptile"] <= quantile, segment)
-
-        case = case.end()
+            case_args.append((df["ptile"] <= quantile, segment))
 
-        df = df.mutate(segment_name=case).drop(["ptile"])
+        df = df.mutate(segment_name=ibis.cases(*case_args)).drop(["ptile"])
 
         if zero_value_customers == "separate_segment":
             df = ibis.union(df, zero_df)
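The `threshold.py` hunk above replaces the deprecated `ibis.case()` / `.when()` / `.end()` builder with the tuple-based `ibis.cases()` API. A self-contained sketch of the new pattern (illustrative only: the in-memory table, thresholds, and segment names are made up; it assumes `ibis-framework>=10`, where `ibis.cases()` is available, and a local backend such as DuckDB for `execute()`):

```python
import ibis

# Tiny in-memory stand-in for per-customer spend totals (made-up data).
t = ibis.memtable({"customer_id": [1, 2, 3, 4], "total_spend": [10.0, 25.0, 40.0, 80.0]})

# Rank customers by spend as a percentile, using the same windowing as threshold.py.
window = ibis.window(order_by=ibis.asc(t.total_spend))
t = t.mutate(ptile=ibis.percent_rank().over(window))

# ibis.cases() takes (condition, value) tuples instead of chained .when() calls.
segmented = t.mutate(
    segment_name=ibis.cases(
        (t.ptile <= 0.5, "Light"),
        (t.ptile <= 1.0, "Heavy"),
    ),
).drop("ptile")

print(segmented.execute())
```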
diff --git a/tests/integration/bigquery/conftest.py b/tests/integration/bigquery/conftest.py
new file mode 100644
index 0000000..c65b03e
--- /dev/null
+++ b/tests/integration/bigquery/conftest.py
@@ -0,0 +1,31 @@
+"""BigQuery integration test fixtures."""
+
+import os
+
+import ibis
+import pytest
+from dotenv import load_dotenv
+from loguru import logger
+
+load_dotenv()
+
+
+@pytest.fixture(scope="session")
+def bigquery_connection():
+    """Connect to BigQuery for integration tests."""
+    try:
+        conn = ibis.bigquery.connect(
+            project_id=os.environ.get("GCP_PROJECT_ID"),
+        )
+        logger.info("Connected to BigQuery")
+    except Exception as e:
+        logger.error(f"Failed to connect to BigQuery: {e}")
+        raise
+    else:
+        return conn
+
+
+@pytest.fixture(scope="session")
+def transactions_table(bigquery_connection):
+    """Get the transactions table for testing."""
+    return bigquery_connection.table("test_data.transactions")
diff --git a/tests/integration/bigquery/test_cohort_analysis.py b/tests/integration/bigquery/test_cohort_analysis.py
new file mode 100644
index 0000000..545a443
--- /dev/null
+++ b/tests/integration/bigquery/test_cohort_analysis.py
@@ -0,0 +1,27 @@
+"""Integration tests for Cohort Analysis with BigQuery."""
+
+import pytest
+
+from pyretailscience.analysis.cohort import CohortAnalysis
+
+
+def test_cohort_analysis_with_bigquery(transactions_table):
+    """Integration test for CohortAnalysis using the BigQuery backend and an Ibis table.
+
+    This test ensures that the CohortAnalysis class initializes and executes successfully
+    using BigQuery data with a representative set of aggregation parameters.
+    """
+    limited_table = transactions_table.limit(5000)
+
+    try:
+        CohortAnalysis(
+            df=limited_table,
+            aggregation_column="unit_spend",
+            agg_func="sum",
+            period="week",
+            percentage=True,
+        )
+    except Exception as e:  # noqa: BLE001
+        pytest.fail(
+            f"CohortAnalysis failed: {e}",
+        )
diff --git a/tests/integration/bigquery/test_composite_rank.py b/tests/integration/bigquery/test_composite_rank.py
new file mode 100644
index 0000000..c25f3b1
--- /dev/null
+++ b/tests/integration/bigquery/test_composite_rank.py
@@ -0,0 +1,58 @@
+"""Integration tests for Composite Rank Analysis with BigQuery."""
+
+import pytest
+
+from pyretailscience.analysis.composite_rank import CompositeRank
+
+
+@pytest.fixture
+def test_transactions_table(transactions_table):
+    """Prepare the transactions Ibis table from BigQuery for CompositeRank tests.
+
+    The expected table should include columns like `product_id`, `unit_spend`, and `customer_id`.
+    Adds a calculated column `spend_per_customer`.
+    """
+    try:
+        ibis_table = transactions_table.mutate(
+            spend_per_customer=transactions_table.unit_spend / transactions_table.customer_id,
+        )
+
+    except Exception as e:  # noqa: BLE001
+        pytest.fail(f"Failed to fetch or preprocess test data: {e}")
+    else:
+        return ibis_table
+
+
+def test_composite_rank_basic(test_transactions_table):
+    """Test basic CompositeRank functionality with BigQuery data."""
+    rank_cols = [
+        ("unit_spend", "desc"),
+        ("customer_id", "desc"),
+        ("spend_per_customer", "desc"),
+    ]
+    try:
+        result = CompositeRank(
+            df=test_transactions_table,
+            rank_cols=rank_cols,
+            agg_func="mean",
+            ignore_ties=False,
+        )
+        assert result is not None
+    except Exception as e:  # noqa: BLE001
+        pytest.fail(f"CompositeRank basic test failed: {e}")
+
+
+@pytest.mark.parametrize("ignore_ties", [False, True])
+def test_tie_handling(test_transactions_table, ignore_ties):
+    """Test handling of ties during rank calculation."""
+    rank_cols = [("unit_spend", "desc")]
+    try:
+        result = CompositeRank(
+            df=test_transactions_table,
+            rank_cols=rank_cols,
+            agg_func="mean",
+            ignore_ties=ignore_ties,
+        )
+        assert result is not None
+    except Exception as e:  # noqa: BLE001
+        pytest.fail(f"CompositeRank failed with ignore_ties={ignore_ties}: {e}")
diff --git a/tests/integration/bigquery/test_cross_shop.py b/tests/integration/bigquery/test_cross_shop.py
new file mode 100644
index 0000000..ef75a3d
--- /dev/null
+++ b/tests/integration/bigquery/test_cross_shop.py
@@ -0,0 +1,49 @@
+"""Integration tests for Cross Shop Analysis with BigQuery."""
+
+import pytest
+
+from pyretailscience.analysis.cross_shop import CrossShop
+
+
+def test_cross_shop_with_bigquery(transactions_table):
+    """Test CrossShop with data fetched from BigQuery.
+
+    This test verifies that CrossShop can be initialized and run with data
+    from BigQuery using three group columns, a value column, and an
+    aggregation function without throwing exceptions.
+ """ + transactions_df = transactions_table.limit(5000) + group_1_col = "brand_name" + group_2_col = "category_0_name" + group_3_col = "category_1_name" + group_1_vals = transactions_df[group_1_col].execute().dropna().unique() + group_2_vals = transactions_df[group_2_col].execute().dropna().unique() + + group_1_val = group_1_vals[0] + group_2_val = group_2_vals[0] + + group_3_val = None + if group_3_col is not None: + group_3_vals = transactions_df[group_3_col].execute().dropna().unique() + if len(group_3_vals) == 0: + pytest.skip(f"Not enough unique values for {group_3_col}") + group_3_val = group_3_vals[0] + + labels = ["Group 1", "Group 2"] if group_3_col is None else ["Group 1", "Group 2", "Group 3"] + + try: + CrossShop( + df=transactions_table, + group_1_col=group_1_col, + group_1_val=group_1_val, + group_2_col=group_2_col, + group_2_val=group_2_val, + group_3_col=group_3_col, + group_3_val=group_3_val, + labels=labels, + value_col="unit_quantity", + agg_func="count", + ) + + except Exception as e: # noqa: BLE001 + pytest.fail(f"CrossShop failed with parameters {group_1_col}, {group_2_col}, {group_3_col}: {e}") diff --git a/tests/integration/bigquery/test_customer_decision_hierarchy.py b/tests/integration/bigquery/test_customer_decision_hierarchy.py new file mode 100644 index 0000000..3d828dc --- /dev/null +++ b/tests/integration/bigquery/test_customer_decision_hierarchy.py @@ -0,0 +1,46 @@ +"""Integration tests for Customer Decision Hierarchy Analysis with BigQuery.""" + +import pytest + +from pyretailscience.analysis.customer_decision_hierarchy import CustomerDecisionHierarchy +from pyretailscience.options import ColumnHelper + +cols = ColumnHelper() + + +@pytest.mark.parametrize( + ("method", "min_var_explained", "exclude_same_transaction"), + [ + ("truncated_svd", 0.7, False), + ("truncated_svd", 0.7, None), + ("truncated_svd", None, False), + ("yules_q", 0.7, False), + ("yules_q", 0.7, None), + ("yules_q", None, False), + ("yules_q", None, None), + ], +) +def test_customer_decision_hierarchy_with_bigquery( + transactions_table, + method, + min_var_explained, + exclude_same_transaction, +): + """Test CustomerDecisionHierarchy with data fetched from BigQuery. + + This parameterized test verifies that CustomerDecisionHierarchy can be initialized + and run with data from BigQuery using different combinations of product columns + and methods without throwing exceptions. 
+ """ + transactions_df = transactions_table.limit(5000).execute() + + try: + CustomerDecisionHierarchy( + df=transactions_df, + product_col="product_name", + exclude_same_transaction_products=exclude_same_transaction, + method=method, + min_var_explained=min_var_explained if min_var_explained is not None else 0.8, + ) + except Exception as e: # noqa: BLE001 + pytest.fail(f"CustomerDecisionHierarchy failed with, method={method}: {e}") diff --git a/tests/integration/bigquery/test_haversine.py b/tests/integration/bigquery/test_haversine.py new file mode 100644 index 0000000..25c4a77 --- /dev/null +++ b/tests/integration/bigquery/test_haversine.py @@ -0,0 +1,21 @@ +"""Integration tests for haversine distance function.""" + +import ibis +import pytest + +from pyretailscience.analysis.haversine import haversine_distance + + +def test_haversine(): + """Test the haversine function.""" + lat1 = ibis.literal(37.7749, type="float64") + lon1 = ibis.literal(-122.4194, type="float64") + lat2 = ibis.literal(40.7128, type="float64") + lon2 = ibis.literal(-74.0060, type="float64") + + distance_expr = haversine_distance(lat1, lon1, lat2, lon2) + + result = distance_expr.execute() + + expected_distance = 4129.086165 + assert pytest.approx(result, rel=1e-3) == expected_distance, "Distance calculation error" diff --git a/tests/integration/bigquery/test_hml_segmentation.py b/tests/integration/bigquery/test_hml_segmentation.py new file mode 100644 index 0000000..c7740a6 --- /dev/null +++ b/tests/integration/bigquery/test_hml_segmentation.py @@ -0,0 +1,32 @@ +"""Integration tests for the HMLSegmentation class using BigQuery.""" + +import pytest + +from pyretailscience.segmentation.hml import HMLSegmentation + + +@pytest.mark.parametrize( + "zero_value_customers", + ["separate_segment", "include_with_light", "exclude"], +) +def test_hml_segmentation_with_bigquery( + transactions_table, + zero_value_customers, +): + """Test HMLSegmentation with data fetched from BigQuery. + + This parameterized test verifies that HMLSegmentation can be initialized + and process data from BigQuery using zero-value handling without throwing exceptions. + """ + limited_transactions = transactions_table.limit(5000) + + try: + HMLSegmentation( + df=limited_transactions, + value_col="unit_cost", + agg_func="mean", + zero_value_customers=zero_value_customers, + ) + + except Exception as e: # noqa: BLE001 + pytest.fail(f"HMLSegmentation failed: {e}") diff --git a/tests/integration/bigquery/test_product_association.py b/tests/integration/bigquery/test_product_association.py new file mode 100644 index 0000000..6ae22d1 --- /dev/null +++ b/tests/integration/bigquery/test_product_association.py @@ -0,0 +1,39 @@ +"""Integration tests for Product Association Analysis with BigQuery.""" + +import pytest + +from pyretailscience.analysis.product_association import ProductAssociation + + +@pytest.mark.parametrize( + "target_item", + [None, "Electronics"], +) +def test_product_association_with_bigquery( + transactions_table, + target_item, +): + """Test ProductAssociation with data fetched from BigQuery. + + This parameterized test verifies that ProductAssociation can be initialized + and process data from BigQuery using target items without throwing exceptions. 
+ """ + limited_transactions = transactions_table.limit(5000) + + try: + ProductAssociation( + df=limited_transactions, + value_col="brand_name", + group_col="transaction_id", + target_item=target_item, + min_occurrences=5, + min_cooccurrences=3, + min_support=0.01, + min_confidence=0.05, + min_uplift=1.0, + ) + + except Exception as e: # noqa: BLE001 + pytest.fail( + f"ProductAssociation failed with target_item={target_item}: {e}", + ) diff --git a/tests/integration/bigquery/test_revenue_tree.py b/tests/integration/bigquery/test_revenue_tree.py new file mode 100644 index 0000000..8311cd9 --- /dev/null +++ b/tests/integration/bigquery/test_revenue_tree.py @@ -0,0 +1,116 @@ +"""Integration tests for Revenue Tree Analysis with BigQuery.""" + +import pytest + +from pyretailscience.analysis.revenue_tree import RevenueTree, calc_tree_kpis +from pyretailscience.options import ColumnHelper + +cols = ColumnHelper() + + +@pytest.mark.parametrize( + "group_col", + [None, "category_0_name"], +) +def test_revenue_tree_with_bigquery( + transactions_table, + group_col, +): + """Test RevenueTree with data fetched from BigQuery. + + This parameterized test verifies that RevenueTree can be initialized + and process data from BigQuery using different group columns + without throwing exceptions. + """ + period_col = "transaction_date" + + limited_transactions = transactions_table.limit(10000) + + try: + RevenueTree( + df=limited_transactions, + period_col=period_col, + p1_value="2023-05-24", + p2_value="2023-04-15", + group_col=group_col, + ) + + except Exception as e: # noqa: BLE001 + pytest.fail(f"RevenueTree failed with period_col={period_col}, group_col={group_col}: {e}") + + +def test_calc_tree_kpis_with_bigquery(transactions_table): + """Test calc_tree_kpis function with data from BigQuery. + + This test verifies that the calc_tree_kpis function can process data derived + from BigQuery without throwing exceptions. + """ + limited_transactions = transactions_table.limit(8000) + + try: + df, p1_index, p2_index = RevenueTree._agg_data( + df=limited_transactions, + period_col="transaction_date", + p1_value="2023-05-24", + p2_value="2023-04-15", + group_col=None, + ) + + calc_tree_kpis( + df=df, + p1_index=p1_index, + p2_index=p2_index, + ) + + except Exception as e: # noqa: BLE001 + pytest.fail(f"calc_tree_kpis failed: {e}") + + +@pytest.mark.parametrize( + "include_qty", + [ + True, + False, + ], +) +def test_revenue_tree_quantity_handling_with_bigquery( + transactions_table, + include_qty, +): + """Test RevenueTree with and without quantity columns using BigQuery data. + + This test verifies that RevenueTree can process BigQuery data both with and + without quantity-related columns without throwing exceptions. 
+ """ + period_col = "transaction_date" + limited_transactions = transactions_table.limit(6000) + + try: + if include_qty: + columns_to_keep = [ + cols.customer_id, + cols.transaction_id, + cols.unit_spend, + cols.unit_qty, + period_col, + ] + else: + columns_to_keep = [ + cols.customer_id, + cols.transaction_id, + cols.unit_spend, + period_col, + ] + + filtered_transactions = limited_transactions.select(columns_to_keep) + + RevenueTree( + df=filtered_transactions, + period_col=period_col, + p1_value="2023-05-24", + p2_value="2023-04-15", + group_col=None, + ) + + except Exception as e: # noqa: BLE001 + pytest.fail(f"RevenueTree quantity handling test failed with include_qty={include_qty}: {e}") diff --git a/tests/integration/bigquery/test_rfm_segmentation.py b/tests/integration/bigquery/test_rfm_segmentation.py new file mode 100644 index 0000000..20ef6df --- /dev/null +++ b/tests/integration/bigquery/test_rfm_segmentation.py @@ -0,0 +1,29 @@ +"""Integration tests for the RFMSegmentation class using BigQuery.""" + +import datetime + +import pytest + +from pyretailscience.segmentation.rfm import RFMSegmentation + + +@pytest.mark.parametrize( + "current_date", + [None, "2023-12-31", datetime.date(2023, 6, 30)], +) +def test_rfm_segmentation_with_bigquery( + transactions_table, + current_date, +): + """Test RFMSegmentation with data fetched from BigQuery. + + This parameterized test verifies that the RFMSegmentation class can be initialized + and process data from BigQuery using different current_date parameters without throwing exceptions. + """ + limited_table = transactions_table.limit(5000) + + try: + RFMSegmentation(df=limited_table, current_date=current_date) + + except Exception as e: # noqa: BLE001 + pytest.fail(f"RFMSegmentation failed with current_date={current_date}: {e}") diff --git a/tests/integration/bigquery/test_segstats_segmentation.py b/tests/integration/bigquery/test_segstats_segmentation.py new file mode 100644 index 0000000..e9db3f0 --- /dev/null +++ b/tests/integration/bigquery/test_segstats_segmentation.py @@ -0,0 +1,38 @@ +"""Integration tests for segmentation statistics using BigQuery data.""" + +import pytest + +from pyretailscience.segmentation.segstats import SegTransactionStats + + +@pytest.mark.parametrize( + ("calc_total", "extra_aggs"), + [ + (True, None), + (False, {"unique_products": ("product_id", "nunique")}), + ], +) +def test_seg_transaction_stats_with_bigquery( + transactions_table, + calc_total, + extra_aggs, +): + """Test SegTransactionStats with data fetched from BigQuery. + + This test verifies that SegTransactionStats can process data directly from + a BigQuery connection using Ibis without throwing exceptions. 
+ """ + try: + limited_table = transactions_table.limit(10000) + + SegTransactionStats( + data=limited_table, + segment_col=["category_0_name", "category_1_name"], + calc_total=calc_total, + extra_aggs=extra_aggs, + ) + + except Exception as e: # noqa: BLE001 + pytest.fail( + f"SegTransactionStats failed with calc_total={calc_total}, extra_aggs={extra_aggs}: {e}", + ) diff --git a/tests/integration/bigquery/test_threshold_segmentation.py b/tests/integration/bigquery/test_threshold_segmentation.py new file mode 100644 index 0000000..b5f24c2 --- /dev/null +++ b/tests/integration/bigquery/test_threshold_segmentation.py @@ -0,0 +1,39 @@ +"""Tests for the ThresholdSegmentation class with BigQuery integration.""" + +import pytest + +from pyretailscience.options import ColumnHelper +from pyretailscience.segmentation.threshold import ThresholdSegmentation + +cols = ColumnHelper() + + +@pytest.mark.parametrize( + "zero_value_handling", + ["separate_segment", "exclude", "include_with_light"], +) +def test_threshold_segmentation_with_bigquery( + transactions_table, + zero_value_handling, +): + """Test ThresholdSegmentation with data fetched from BigQuery. + + This test verifies that ThresholdSegmentation can process data directly from + a BigQuery connection using Ibis without throwing exceptions. + """ + try: + limited_table = transactions_table.limit(1000) + + ThresholdSegmentation( + df=limited_table, + thresholds=[0.33, 0.66], + segments=["Low", "High"], + value_col=cols.unit_spend, + agg_func="mean", + zero_value_customers=zero_value_handling, + ) + + except Exception as e: # noqa: BLE001 + pytest.fail( + f"ThresholdSegmentation failed with zero_value_handling={zero_value_handling}: {e}", + ) diff --git a/uv.lock b/uv.lock index 187d485..8cbb41b 100644 --- a/uv.lock +++ b/uv.lock @@ -147,6 +147,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ea/63/da7237f805089ecc28a3f36bca6a21c31fcbc2eb380f3b8f1be3312abd14/bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6", size = 162750 }, ] +[[package]] +name = "cachetools" +version = "5.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/81/3747dad6b14fa2cf53fcf10548cf5aea6913e96fab41a3c198676f8948a5/cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4", size = 28380 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/76/20fa66124dbe6be5cafeb312ece67de6b61dd91a0247d1ea13db4ebb33c2/cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a", size = 10080 }, +] + [[package]] name = "certifi" version = "2024.7.4" @@ -344,6 +353,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321 }, ] +[[package]] +name = "db-dtypes" +version = "1.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "packaging" }, + { name = "pandas" }, + { name = "pyarrow" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7c/f3/65b49a0b3f6cc0e4daf46112f7590d189eba87ee36f915cc13a2e6a0cecf/db_dtypes-1.4.2.tar.gz", hash = "sha256:04348969e0d533de5f11ec3ac8fcb2dd983ac40229d042198ab9d8de51801a6e", size = 33539 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/69/aa/8f09c6af64d562606d128acab327dab759ac005a204f470c6d257f47d857/db_dtypes-1.4.2-py2.py3-none-any.whl", hash = "sha256:b3cd0128c8310a2e9ef249da2353e5cb07c62d8a3ce800c7990f9998eee74582", size = 18970 }, +] + [[package]] name = "debugpy" version = "1.8.5" @@ -497,6 +521,145 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034 }, ] +[[package]] +name = "google-api-core" +version = "2.24.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/5c/085bcb872556934bb119e5e09de54daa07873f6866b8f0303c49e72287f7/google_api_core-2.24.2.tar.gz", hash = "sha256:81718493daf06d96d6bc76a91c23874dbf2fac0adbbf542831b805ee6e974696", size = 163516 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/95/f472d85adab6e538da2025dfca9e976a0d125cc0af2301f190e77b76e51c/google_api_core-2.24.2-py3-none-any.whl", hash = "sha256:810a63ac95f3c441b7c0e43d344e372887f62ce9071ba972eacf32672e072de9", size = 160061 }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, + { name = "grpcio-status" }, +] + +[[package]] +name = "google-auth" +version = "2.40.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/a5/38c21d0e731bb716cffcf987bd9a3555cb95877ab4b616cfb96939933f20/google_auth-2.40.1.tar.gz", hash = "sha256:58f0e8416a9814c1d86c9b7f6acf6816b51aba167b2c76821965271bac275540", size = 280975 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/b1/1272c6e80847ba5349f5ccb7574596393d1e222543f5003cb810865c3575/google_auth-2.40.1-py2.py3-none-any.whl", hash = "sha256:ed4cae4f5c46b41bae1d19c036e06f6c371926e97b19e816fc854eff811974ee", size = 216101 }, +] + +[[package]] +name = "google-auth-oauthlib" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "requests-oauthlib" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/87/e10bf24f7bcffc1421b84d6f9c3377c30ec305d082cd737ddaa6d8f77f7c/google_auth_oauthlib-1.2.2.tar.gz", hash = "sha256:11046fb8d3348b296302dd939ace8af0a724042e8029c1b872d87fabc9f41684", size = 20955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/84/40ee070be95771acd2f4418981edb834979424565c3eec3cd88b6aa09d24/google_auth_oauthlib-1.2.2-py3-none-any.whl", hash = "sha256:fd619506f4b3908b5df17b65f39ca8d66ea56986e5472eb5978fd8f3786f00a2", size = 19072 }, +] + +[[package]] +name = "google-cloud-bigquery" +version = "3.30.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-resumable-media" }, + { name = "packaging" }, + { name = "python-dateutil" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/2f/3dda76b3ec029578838b1fe6396e6b86eb574200352240e23dea49265bb7/google_cloud_bigquery-3.30.0.tar.gz", hash = 
"sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6", size = 474389 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/6d/856a6ca55c1d9d99129786c929a27dd9d31992628ebbff7f5d333352981f/google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877", size = 247885 }, +] + +[[package]] +name = "google-cloud-bigquery-storage" +version = "2.31.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/a1/fc51fd5db62aa6ebd2a20cac239adf636061d3b640bf0b4c0847c476acea/google_cloud_bigquery_storage-2.31.0.tar.gz", hash = "sha256:e4b42df3374dbc9575268c89d5dec47fced075c44904c463b12aed2b01be6790", size = 256446 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/60/8fd89f8874f4717435508ee719a888cfd24132f7ff4530b9effedede80cf/google_cloud_bigquery_storage-2.31.0-py3-none-any.whl", hash = "sha256:1721792f39f5ecb49b8503cf197ee8ab79f7deebf17fc4a4a44585959336365b", size = 256459 }, +] + +[[package]] +name = "google-cloud-core" +version = "2.4.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/b8/2b53838d2acd6ec6168fd284a990c76695e84c65deee79c9f3a4276f6b4f/google_cloud_core-2.4.3.tar.gz", hash = "sha256:1fab62d7102844b278fe6dead3af32408b1df3eb06f5c7e8634cbd40edc4da53", size = 35861 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/86/bda7241a8da2d28a754aad2ba0f6776e35b67e37c36ae0c45d49370f1014/google_cloud_core-2.4.3-py2.py3-none-any.whl", hash = "sha256:5130f9f4c14b4fafdff75c79448f9495cfade0d8775facf1b09c3bf67e027f6e", size = 29348 }, +] + +[[package]] +name = "google-crc32c" +version = "1.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/ae/87802e6d9f9d69adfaedfcfd599266bf386a54d0be058b532d04c794f76d/google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472", size = 14495 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/94/220139ea87822b6fdfdab4fb9ba81b3fff7ea2c82e2af34adc726085bffc/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06", size = 30468 }, + { url = "https://files.pythonhosted.org/packages/94/97/789b23bdeeb9d15dc2904660463ad539d0318286d7633fe2760c10ed0c1c/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9", size = 30313 }, + { url = "https://files.pythonhosted.org/packages/81/b8/976a2b843610c211e7ccb3e248996a61e87dbb2c09b1499847e295080aec/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77", size = 33048 }, + { url = "https://files.pythonhosted.org/packages/c9/16/a3842c2cf591093b111d4a5e2bfb478ac6692d02f1b386d2a33283a19dc9/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53", size = 32669 }, + { url = 
"https://files.pythonhosted.org/packages/04/17/ed9aba495916fcf5fe4ecb2267ceb851fc5f273c4e4625ae453350cfd564/google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d", size = 33476 }, + { url = "https://files.pythonhosted.org/packages/dd/b7/787e2453cf8639c94b3d06c9d61f512234a82e1d12d13d18584bd3049904/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d73a68a653c57281401871dd4aeebbb6af3191dcac751a76ce430df4d403194", size = 30470 }, + { url = "https://files.pythonhosted.org/packages/ed/b4/6042c2b0cbac3ec3a69bb4c49b28d2f517b7a0f4a0232603c42c58e22b44/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:22beacf83baaf59f9d3ab2bbb4db0fb018da8e5aebdce07ef9f09fce8220285e", size = 30315 }, + { url = "https://files.pythonhosted.org/packages/29/ad/01e7a61a5d059bc57b702d9ff6a18b2585ad97f720bd0a0dbe215df1ab0e/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19eafa0e4af11b0a4eb3974483d55d2d77ad1911e6cf6f832e1574f6781fd337", size = 33180 }, + { url = "https://files.pythonhosted.org/packages/3b/a5/7279055cf004561894ed3a7bfdf5bf90a53f28fadd01af7cd166e88ddf16/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d86616faaea68101195c6bdc40c494e4d76f41e07a37ffdef270879c15fb65", size = 32794 }, + { url = "https://files.pythonhosted.org/packages/0f/d6/77060dbd140c624e42ae3ece3df53b9d811000729a5c821b9fd671ceaac6/google_crc32c-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:b7491bdc0c7564fcf48c0179d2048ab2f7c7ba36b84ccd3a3e1c3f7a72d3bba6", size = 33477 }, + { url = "https://files.pythonhosted.org/packages/16/1b/1693372bf423ada422f80fd88260dbfd140754adb15cbc4d7e9a68b1cb8e/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48", size = 28241 }, + { url = "https://files.pythonhosted.org/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048 }, +] + +[[package]] +name = "google-resumable-media" +version = "2.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/5a/0efdc02665dca14e0837b62c8a1a93132c264bd02054a15abb2218afe0ae/google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0", size = 2163099 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/35/b8d3baf8c46695858cb9d8835a53baa1eeb9906ddaf2f728a5f5b640fd1e/google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa", size = 81251 }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530 }, +] + [[package]] name = "graphviz" version = "0.20.3" @@ -518,6 +681,48 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/22/de/521ff6028fc8977d5669b48d84b002b7bf5c99a3e9c551c92d4c6bf95ec1/griffe-0.48.0-py3-none-any.whl", hash = "sha256:f944c6ff7bd31cf76f264adcd6ab8f3d00a2f972ae5cc8db2d7b6dcffeff65a2", size = 140816 }, ] +[[package]] +name = "grpcio" +version = "1.71.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/95/aa11fc09a85d91fbc7dd405dcb2a1e0256989d67bf89fa65ae24b3ba105a/grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c", size = 12549828 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/04/a085f3ad4133426f6da8c1becf0749872a49feb625a407a2e864ded3fb12/grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef", size = 5210453 }, + { url = "https://files.pythonhosted.org/packages/b4/d5/0bc53ed33ba458de95020970e2c22aa8027b26cc84f98bea7fcad5d695d1/grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7", size = 11347567 }, + { url = "https://files.pythonhosted.org/packages/e3/6d/ce334f7e7a58572335ccd61154d808fe681a4c5e951f8a1ff68f5a6e47ce/grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7", size = 5696067 }, + { url = "https://files.pythonhosted.org/packages/05/4a/80befd0b8b1dc2b9ac5337e57473354d81be938f87132e147c4a24a581bd/grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7", size = 6348377 }, + { url = "https://files.pythonhosted.org/packages/c7/67/cbd63c485051eb78663355d9efd1b896cfb50d4a220581ec2cb9a15cd750/grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e", size = 5940407 }, + { url = "https://files.pythonhosted.org/packages/98/4b/7a11aa4326d7faa499f764eaf8a9b5a0eb054ce0988ee7ca34897c2b02ae/grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b", size = 6030915 }, + { url = "https://files.pythonhosted.org/packages/eb/a2/cdae2d0e458b475213a011078b0090f7a1d87f9a68c678b76f6af7c6ac8c/grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7", size = 6648324 }, + { url = "https://files.pythonhosted.org/packages/27/df/f345c8daaa8d8574ce9869f9b36ca220c8845923eb3087e8f317eabfc2a8/grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3", size = 6197839 }, + { url = "https://files.pythonhosted.org/packages/f2/2c/cd488dc52a1d0ae1bad88b0d203bc302efbb88b82691039a6d85241c5781/grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444", size = 3619978 }, + { url = 
"https://files.pythonhosted.org/packages/ee/3f/cf92e7e62ccb8dbdf977499547dfc27133124d6467d3a7d23775bcecb0f9/grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b", size = 4282279 }, + { url = "https://files.pythonhosted.org/packages/4c/83/bd4b6a9ba07825bd19c711d8b25874cd5de72c2a3fbf635c3c344ae65bd2/grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537", size = 5184101 }, + { url = "https://files.pythonhosted.org/packages/31/ea/2e0d90c0853568bf714693447f5c73272ea95ee8dad107807fde740e595d/grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7", size = 11310927 }, + { url = "https://files.pythonhosted.org/packages/ac/bc/07a3fd8af80467390af491d7dc66882db43884128cdb3cc8524915e0023c/grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec", size = 5654280 }, + { url = "https://files.pythonhosted.org/packages/16/af/21f22ea3eed3d0538b6ef7889fce1878a8ba4164497f9e07385733391e2b/grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594", size = 6312051 }, + { url = "https://files.pythonhosted.org/packages/49/9d/e12ddc726dc8bd1aa6cba67c85ce42a12ba5b9dd75d5042214a59ccf28ce/grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c", size = 5910666 }, + { url = "https://files.pythonhosted.org/packages/d9/e9/38713d6d67aedef738b815763c25f092e0454dc58e77b1d2a51c9d5b3325/grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67", size = 6012019 }, + { url = "https://files.pythonhosted.org/packages/80/da/4813cd7adbae6467724fa46c952d7aeac5e82e550b1c62ed2aeb78d444ae/grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db", size = 6637043 }, + { url = "https://files.pythonhosted.org/packages/52/ca/c0d767082e39dccb7985c73ab4cf1d23ce8613387149e9978c70c3bf3b07/grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79", size = 6186143 }, + { url = "https://files.pythonhosted.org/packages/00/61/7b2c8ec13303f8fe36832c13d91ad4d4ba57204b1c723ada709c346b2271/grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a", size = 3604083 }, + { url = "https://files.pythonhosted.org/packages/fd/7c/1e429c5fb26122055d10ff9a1d754790fb067d83c633ff69eddcf8e3614b/grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8", size = 4272191 }, +] + +[[package]] +name = "grpcio-status" +version = "1.71.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d7/53/a911467bece076020456401f55a27415d2d70d3bc2c37af06b44ea41fc5c/grpcio_status-1.71.0.tar.gz", hash = "sha256:11405fed67b68f406b3f3c7c5ae5104a79d2d309666d10d61b152e91d28fb968", size = 13669 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ad/d6/31fbc43ff097d8c4c9fc3df741431b8018f67bf8dfbe6553a555f6e5f675/grpcio_status-1.71.0-py3-none-any.whl", hash = "sha256:843934ef8c09e3e858952887467f8256aac3910c55f077a359a65b2b3cde3e68", size = 14424 }, +] + [[package]] name = "h11" version = "0.14.0" @@ -575,6 +780,18 @@ wheels = [ ] [package.optional-dependencies] +bigquery = [ + { name = "db-dtypes" }, + { name = "google-cloud-bigquery" }, + { name = "google-cloud-bigquery-storage" }, + { name = "numpy" }, + { name = "pandas" }, + { name = "pandas-gbq" }, + { name = "pyarrow" }, + { name = "pyarrow-hotfix" }, + { name = "pydata-google-auth" }, + { name = "rich" }, +] duckdb = [ { name = "duckdb" }, { name = "numpy" }, @@ -1405,6 +1622,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ad/9c/4a93b8e395b755c53628573d75d7b21985d9a0f416e978d637084ccc8ec3/numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df", size = 16208660 }, ] +[[package]] +name = "oauthlib" +version = "3.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/fa/fbf4001037904031639e6bfbfc02badfc7e12f137a8afa254df6c4c8a670/oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918", size = 177352 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/80/cab10959dc1faead58dc8384a781dfbf93cb4d33d50988f7a69f1b7c9bbe/oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca", size = 151688 }, +] + [[package]] name = "overrides" version = "7.7.0" @@ -1457,6 +1683,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/22/a5/a0b255295406ed54269814bc93723cfd1a0da63fb9aaf99e1364f07923e5/pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23", size = 11498828 }, ] +[[package]] +name = "pandas-gbq" +version = "0.28.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "db-dtypes" }, + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-auth-oauthlib" }, + { name = "google-cloud-bigquery" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pandas" }, + { name = "pyarrow" }, + { name = "pydata-google-auth" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/01/772caa64d1987f6b0eca64c643ac14f8899402dec70e9b08ae15ba410096/pandas_gbq-0.28.0.tar.gz", hash = "sha256:daa4ffb80c1c262185059adb4551ac0cc52013ca3b7ab72c11cec1011f242ae5", size = 64056 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/72/eba5962318d580defd234cc759e710ba588c6e6c1a1bc4d633939111827e/pandas_gbq-0.28.0-py2.py3-none-any.whl", hash = "sha256:6be441dff24cde87ebf1e61ee66a3a7c51c4894aa1db0f9983a2c927f57caad3", size = 37911 }, +] + [[package]] name = "pandocfilters" version = "1.5.1" @@ -1590,6 +1838,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e8/23/22750c4b768f09386d1c3cc4337953e8936f48a888fa6dddfb669b2c9088/prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10", size = 386411 }, ] +[[package]] +name = "proto-plus" +version = "1.26.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163 }, +] + +[[package]] +name = "protobuf" +version = "5.29.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/17/7d/b9dca7365f0e2c4fa7c193ff795427cfa6290147e5185ab11ece280a18e7/protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99", size = 424902 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/b2/043a1a1a20edd134563699b0e91862726a0dc9146c090743b6c44d798e75/protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7", size = 422709 }, + { url = "https://files.pythonhosted.org/packages/79/fc/2474b59570daa818de6124c0a15741ee3e5d6302e9d6ce0bdfd12e98119f/protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d", size = 434506 }, + { url = "https://files.pythonhosted.org/packages/46/de/7c126bbb06aa0f8a7b38aaf8bd746c514d70e6a2a3f6dd460b3b7aad7aae/protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0", size = 417826 }, + { url = "https://files.pythonhosted.org/packages/a2/b5/bade14ae31ba871a139aa45e7a8183d869efe87c34a4850c87b936963261/protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e", size = 319574 }, + { url = "https://files.pythonhosted.org/packages/46/88/b01ed2291aae68b708f7d334288ad5fb3e7aa769a9c309c91a0d55cb91b0/protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922", size = 319672 }, + { url = "https://files.pythonhosted.org/packages/12/fb/a586e0c973c95502e054ac5f81f88394f24ccc7982dac19c515acd9e2c93/protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862", size = 172551 }, +] + [[package]] name = "psutil" version = "6.0.0" @@ -1657,6 +1931,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e4/f4/9ec2222f5f5f8ea04f66f184caafd991a39c8782e31f5b0266f101cb68ca/pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178", size = 7888 }, ] +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = 
{ url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259 }, +] + [[package]] name = "pycparser" version = "2.22" @@ -1666,6 +1961,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, ] +[[package]] +name = "pydata-google-auth" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "google-auth-oauthlib" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3f/0d/455cb39f0d5a914412b57c55c6b16977c61a5ac74b615eea4fb0dc54e329/pydata-google-auth-1.9.1.tar.gz", hash = "sha256:0a51ce41c601ca0bc69b8795bf58bedff74b4a6a007c9106c7cbcdec00eaced2", size = 29814 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/cb/cdeaba62aa3c48f0d8834afb82b4a21463cd83df34fe01f9daa89a08ec6c/pydata_google_auth-1.9.1-py2.py3-none-any.whl", hash = "sha256:75ffce5d106e34b717b31844c1639ea505b7d9550dc23b96fb6c20d086b53fa3", size = 15552 }, +] + [[package]] name = "pygments" version = "2.18.0" @@ -1699,7 +2008,7 @@ wheels = [ [[package]] name = "pyretailscience" -version = "0.23.0" +version = "0.24.0" source = { editable = "." 
} dependencies = [ { name = "duckdb" }, @@ -1720,11 +2029,13 @@ dependencies = [ [package.dev-dependencies] dev = [ { name = "freezegun" }, + { name = "ibis-framework", extra = ["bigquery"] }, { name = "nbstripout" }, { name = "pre-commit" }, { name = "pytest" }, { name = "pytest-cov" }, { name = "pytest-mock" }, + { name = "python-dotenv" }, { name = "ruff" }, { name = "tomlkit" }, ] @@ -1760,11 +2071,13 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ { name = "freezegun", specifier = ">=1.5.1,<2" }, + { name = "ibis-framework", extras = ["bigquery"], specifier = ">=10.0.0,<11" }, { name = "nbstripout", specifier = ">=0.7.1,<0.8" }, { name = "pre-commit", specifier = ">=3.6.2,<4" }, { name = "pytest", specifier = ">=8.0.0,<9" }, { name = "pytest-cov", specifier = ">=4.1.0,<5" }, { name = "pytest-mock", specifier = ">=3.14.0,<4" }, + { name = "python-dotenv", specifier = ">=1.0.0,<2" }, { name = "ruff", specifier = ">=0.9,<0.10" }, { name = "tomlkit", specifier = ">=0.12,<1" }, ] @@ -1832,6 +2145,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, ] +[[package]] +name = "python-dotenv" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 }, +] + [[package]] name = "python-json-logger" version = "2.0.7" @@ -2012,6 +2334,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, ] +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "oauthlib" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179 }, +] + [[package]] name = "rfc3339-validator" version = "0.1.4" @@ -2080,6 +2415,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/2f/b938864d66b86a6e4acadefdc56de75ef56f7cafdfd568a6464605457bd5/rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585", size = 214458 }, ] +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696 }, +] + [[package]] name = "ruff" version = "0.9.7"