Skip to content

Commit aa63f31

Browse files
Merge pull request #91 from PingThingsIO/staging
Release v5.33.0
2 parents 9ab0202 + 5866d93 commit aa63f31

File tree

5 files changed: +26 additions, -22 deletions

.github/workflows/release.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ jobs:
1313
runs-on: ${{ matrix.os }}
1414
strategy:
1515
matrix:
-        python-version: [3.7, 3.8, 3.9, "3.10", "3.11"]
+        python-version: [3.8, 3.9, "3.10", "3.11"] # TODO 3.12
1717
os: [ubuntu-latest, macos-latest, windows-latest]
1818

1919
steps:

btrdb/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
## Module Info
1616
##########################################################################
1717

-__version_info__ = {"major": 5, "minor": 32, "micro": 0, "releaselevel": "final"}
+__version_info__ = {"major": 5, "minor": 33, "micro": 0, "releaselevel": "final"}
1919

2020
##########################################################################
2121
## Helper Functions

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "btrdb"
-version = "5.32.0"
+version = "5.33.0"
44
authors = [
55
{name="PingThingsIO", email="support@pingthings.io"},
66
]

tests/btrdb_integration/test_conn.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
import btrdb
88
from btrdb.exceptions import BTrDBError
+from btrdb.utils.credentials import credentials_by_profile
910

1011

1112
@pytest.mark.skipif(
@@ -20,9 +21,12 @@ def test_connection_info(self, conn):
2021
def test_incorrect_connect(
2122
self,
2223
):
+        env_name = os.getenv("BTRDB_INTEGRATION_TEST_PROFILE")
+        creds = credentials_by_profile(env_name)
+        conn_str = creds["endpoints"]
2327
err_msg = r"""Could not connect to the database, error message: <_InactiveRpcError of RPC that terminated with:\n\tstatus = StatusCode.UNAUTHENTICATED\n\tdetails = "invalid api key"\n"""
2428
with pytest.raises(BTrDBError, match=err_msg):
-            conn = btrdb.connect(conn_str="127.0.0.1:4410", apikey="BOGUS_KEY")
+            conn = btrdb.connect(conn_str=conn_str, apikey="BOGUS_KEY")
2630

2731
@pytest.mark.xfail(
2832
reason="Should return BTrDBError, but returns GRPCError instead, FIXME"

tests/btrdb_integration/test_streamset.py

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -54,8 +54,8 @@ def test_streamset_arrow_values(conn, tmp_collection):
5454
s2.insert(list(zip(t2, d2)))
5555
ss = btrdb.stream.StreamSet([s1, s2]).filter(start=100, end=121)
5656
expected_times = [100, 101, 105, 106, 110, 114, 115, 119, 120]
-    expected_col1 = [0.0, np.NaN, 1.0, np.NaN, 2.0, np.NaN, 3.0, np.NaN, 4.0]
-    expected_col2 = [np.NaN, 5.0, np.NaN, 6.0, 7.0, 8.0, np.NaN, 9.0, np.NaN]
+    expected_col1 = [0.0, np.nan, 1.0, np.nan, 2.0, np.nan, 3.0, np.nan, 4.0]
+    expected_col2 = [np.nan, 5.0, np.nan, 6.0, 7.0, 8.0, np.nan, 9.0, np.nan]
5959
expected_schema = pa.schema(
6060
[
6161
pa.field("time", pa.timestamp("ns", tz="UTC"), nullable=False),
@@ -65,8 +65,8 @@ def test_streamset_arrow_values(conn, tmp_collection):
6565
)
6666
values = ss.arrow_values()
6767
times = [t.value for t in values["time"]]
-    col1 = [np.NaN if isnan(v.as_py()) else v.as_py() for v in values[str(s1.uuid)]]
-    col2 = [np.NaN if isnan(v.as_py()) else v.as_py() for v in values[str(s2.uuid)]]
+    col1 = [np.nan if isnan(v.as_py()) else v.as_py() for v in values[str(s1.uuid)]]
+    col2 = [np.nan if isnan(v.as_py()) else v.as_py() for v in values[str(s2.uuid)]]
7070
assert times == expected_times
7171
assert col1 == expected_col1
7272
assert col2 == expected_col2
@@ -91,12 +91,12 @@ def test_streamset_template_schema(conn, tmp_collection):
9191
)
9292
ss = btrdb.stream.StreamSet([s1, s2]).filter(start=100, end=121, schema=schema)
9393
expected_times = [100, 101, 105, 106, 110, 114, 115, 119, 120]
-    expected_col1 = [0.0, np.NaN, 1.0, np.NaN, 2.0, np.NaN, 3.0, np.NaN, 4.0]
-    expected_col2 = [np.NaN, 5.0, np.NaN, 6.0, 7.0, 8.0, np.NaN, 9.0, np.NaN]
+    expected_col1 = [0.0, np.nan, 1.0, np.nan, 2.0, np.nan, 3.0, np.nan, 4.0]
+    expected_col2 = [np.nan, 5.0, np.nan, 6.0, 7.0, 8.0, np.nan, 9.0, np.nan]
9696
values = ss.arrow_values()
9797
times = [t.value for t in values["t"]]
-    col1 = [np.NaN if isnan(v.as_py()) else v.as_py() for v in values["a"]]
-    col2 = [np.NaN if isnan(v.as_py()) else v.as_py() for v in values["b"]]
+    col1 = [np.nan if isnan(v.as_py()) else v.as_py() for v in values["a"]]
+    col2 = [np.nan if isnan(v.as_py()) else v.as_py() for v in values["b"]]
100100
assert times == expected_times
101101
assert col1 == expected_col1
102102
assert col2 == expected_col2
@@ -292,8 +292,8 @@ def test_streamset_to_dataframe(conn, tmp_collection):
292292
ss = btrdb.stream.StreamSet([s1, s2]).filter(start=100, end=121)
293293
values = ss.to_dataframe()
294294
expected_times = [100, 101, 105, 106, 110, 114, 115, 119, 120]
-    expected_col1 = [0.0, np.NaN, 1.0, np.NaN, 2.0, np.NaN, 3.0, np.NaN, 4.0]
-    expected_col2 = [np.NaN, 5.0, np.NaN, 6.0, 7.0, 8.0, np.NaN, 9.0, np.NaN]
+    expected_col1 = [0.0, np.nan, 1.0, np.nan, 2.0, np.nan, 3.0, np.nan, 4.0]
+    expected_col2 = [np.nan, 5.0, np.nan, 6.0, 7.0, 8.0, np.nan, 9.0, np.nan]
297297
expected_dat = {
298298
tmp_collection + "/s1": expected_col1,
299299
tmp_collection + "/s2": expected_col2,
@@ -318,10 +318,10 @@ def test_arrow_streamset_to_dataframe(conn, tmp_collection):
318318
pa.scalar(v, type=pa.timestamp("ns", tz="UTC")).as_py() for v in expected_times
319319
]
320320
expected_col1 = pa.array(
-        [0.0, np.NaN, 1.0, np.NaN, 2.0, np.NaN, 3.0, np.NaN, 4.0], mask=[False] * 9
+        [0.0, np.nan, 1.0, np.nan, 2.0, np.nan, 3.0, np.nan, 4.0], mask=[False] * 9
322322
)
323323
expected_col2 = pa.array(
-        [np.NaN, 5.0, np.NaN, 6.0, 7.0, 8.0, np.NaN, 9.0, np.NaN], mask=[False] * 9
+        [np.nan, 5.0, np.nan, 6.0, 7.0, 8.0, np.nan, 9.0, np.nan], mask=[False] * 9
325325
)
326326
expected_dat = {
327327
"time": expected_times,
@@ -361,8 +361,8 @@ def test_arrow_streamset_to_polars(conn, tmp_collection):
361361
expected_times = [
362362
pa.scalar(v, type=pa.timestamp("ns", tz="UTC")).as_py() for v in expected_times
363363
]
-    expected_col1 = [0.0, np.NaN, 1.0, np.NaN, 2.0, np.NaN, 3.0, np.NaN, 4.0]
-    expected_col2 = [np.NaN, 5.0, np.NaN, 6.0, 7.0, 8.0, np.NaN, 9.0, np.NaN]
+    expected_col1 = [0.0, np.nan, 1.0, np.nan, 2.0, np.nan, 3.0, np.nan, 4.0]
+    expected_col2 = [np.nan, 5.0, np.nan, 6.0, 7.0, 8.0, np.nan, 9.0, np.nan]
366366
expected_dat = {
367367
tmp_collection + "/s1": expected_col1,
368368
tmp_collection + "/s2": expected_col2,
@@ -395,8 +395,8 @@ def test_streamset_arrow_polars_vs_old_to_polars(conn, tmp_collection, name_call
395395
expected_times = [
396396
pa.scalar(v, type=pa.timestamp("ns", tz="UTC")).as_py() for v in expected_times
397397
]
-    expected_col1 = [0.0, np.NaN, 1.0, np.NaN, 2.0, np.NaN, 3.0, np.NaN, 4.0]
-    expected_col2 = [np.NaN, 5.0, np.NaN, 6.0, 7.0, 8.0, np.NaN, 9.0, np.NaN]
+    expected_col1 = [0.0, np.nan, 1.0, np.nan, 2.0, np.nan, 3.0, np.nan, 4.0]
+    expected_col2 = [np.nan, 5.0, np.nan, 6.0, 7.0, 8.0, np.nan, 9.0, np.nan]
400400
expected_dat = {
401401
tmp_collection + "/s1": expected_col1,
402402
tmp_collection + "/s2": expected_col2,
@@ -509,8 +509,8 @@ def test_timesnap_backward_extends_range(conn, tmp_collection):
509509
values = ss.arrow_values()
510510
assert [1 * sec, 2 * sec] == [t.value for t in values["time"]]
511511
assert [0.5, 2.0] == [v.as_py() for v in values[str(s1.uuid)]]
-    assert [np.NaN, 2.0] == [
-        np.NaN if isnan(v.as_py()) else v.as_py() for v in values[str(s2.uuid)]
+    assert [np.nan, 2.0] == [
+        np.nan if isnan(v.as_py()) else v.as_py() for v in values[str(s2.uuid)]
514514
]
515515
assert [1.0, 2.0] == [v.as_py() for v in values[str(s3.uuid)]]
516516

0 commit comments

Comments (0)