diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..de35933 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +__pycache__/ +**/__pycache__/ diff --git a/bmspy/classes.py b/bmspy/classes.py index 8db5058..73637ac 100644 --- a/bmspy/classes.py +++ b/bmspy/classes.py @@ -60,7 +60,7 @@ class UPS(ABC): @abstractmethod def items(self) -> Iterator[tuple[str, BMSField]]: - ... + ... # pragma: no cover def __bool__(self) -> bool: """Return True if the UPS has at least one populated field.""" diff --git a/bmspy/jbd_bms.py b/bmspy/jbd_bms.py index 133e75d..36627b9 100644 --- a/bmspy/jbd_bms.py +++ b/bmspy/jbd_bms.py @@ -224,7 +224,7 @@ def parse_03_response(response: bytearray, debug: int = 0) -> JBDBMS | bool: debugger("parse_03_response ERROR: failed to validate received checksum") return False - if data_len == 0: + if data_len == 0: # pragma: no cover return False result = JBDBMS() @@ -482,7 +482,7 @@ def parse_04_response(response: bytearray, debug: int = 0) -> BMSMultiField | bo debugger("parse_04_response ERROR: failed to validate received checksum") return False - if data_len == 0: + if data_len == 0: # pragma: no cover return False raw_values = {} diff --git a/poetry.lock b/poetry.lock index e474683..2c8e11c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7,165 +7,143 @@ description = "Python package for providing Mozilla's CA Bundle." optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"influxdb\" or extra == \"ecoflow\"" +markers = "extra == \"influxdb\"" files = [ {file = "certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a"}, {file = "certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580"}, ] [[package]] -name = "charset-normalizer" -version = "3.4.7" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = true -python-versions = ">=3.7" +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "sys_platform == \"win32\"" files = [ - {file = "charset_normalizer-3.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cdd68a1fb318e290a2077696b7eb7a21a49163c455979c639bf5a5dcdc46617d"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e17b8d5d6a8c47c85e68ca8379def1303fd360c3e22093a807cd34a71cd082b8"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:511ef87c8aec0783e08ac18565a16d435372bc1ac25a91e6ac7f5ef2b0bff790"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:007d05ec7321d12a40227aae9e2bc6dca73f3cb21058999a1df9e193555a9dcc"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf29836da5119f3c8a8a70667b0ef5fdca3bb12f80fd06487cfa575b3909b393"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:12d8baf840cc7889b37c7c770f478adea7adce3dcb3944d02ec87508e2dcf153"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d560742f3c0d62afaccf9f41fe485ed69bd7661a241f86a3ef0f0fb8b1a397af"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b14b2d9dac08e28bb8046a1a0434b1750eb221c8f5b87a68f4fa11a6f97b5e34"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:bc17a677b21b3502a21f66a8cc64f5bfad4df8a0b8434d661666f8ce90ac3af1"}, - {file = 
"charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:750e02e074872a3fad7f233b47734166440af3cdea0add3e95163110816d6752"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:4e5163c14bffd570ef2affbfdd77bba66383890797df43dc8b4cc7d6f500bf53"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6ed74185b2db44f41ef35fd1617c5888e59792da9bbc9190d6c7300617182616"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:94e1885b270625a9a828c9793b4d52a64445299baa1fea5a173bf1d3dd9a1a5a"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-win32.whl", hash = "sha256:6785f414ae0f3c733c437e0f3929197934f526d19dfaa75e18fdb4f94c6fb374"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:6696b7688f54f5af4462118f0bfa7c1621eeb87154f77fa04b9295ce7a8f2943"}, - {file = "charset_normalizer-3.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:66671f93accb62ed07da56613636f3641f1a12c13046ce91ffc923721f23c008"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7641bb8895e77f921102f72833904dcd9901df5d6d72a2ab8f31d04b7e51e4e7"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:202389074300232baeb53ae2569a60901f7efadd4245cf3a3bf0617d60b439d7"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:30b8d1d8c52a48c2c5690e152c169b673487a2a58de1ec7393196753063fcd5e"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:532bc9bf33a68613fd7d65e4b1c71a6a38d7d42604ecf239c77392e9b4e8998c"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2fe249cb4651fd12605b7288b24751d8bfd46d35f12a20b1ba33dea122e690df"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:65bcd23054beab4d166035cabbc868a09c1a49d1efe458fe8e4361215df40265"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:08e721811161356f97b4059a9ba7bafb23ea5ee2255402c42881c214e173c6b4"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e060d01aec0a910bdccb8be71faf34e7799ce36950f8294c8bf612cba65a2c9e"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:38c0109396c4cfc574d502df99742a45c72c08eff0a36158b6f04000043dbf38"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1c2a768fdd44ee4a9339a9b0b130049139b8ce3c01d2ce09f67f5a68048d477c"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:1a87ca9d5df6fe460483d9a5bbf2b18f620cbed41b432e2bddb686228282d10b"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d635aab80466bc95771bb78d5370e74d36d1fe31467b6b29b8b57b2a3cd7d22c"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae196f021b5e7c78e918242d217db021ed2a6ace2bc6ae94c0fc596221c7f58d"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-win32.whl", hash = "sha256:adb2597b428735679446b46c8badf467b4ca5f5056aae4d51a19f9570301b1ad"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:8e385e4267ab76874ae30db04c627faaaf0b509e1ccc11a95b3fc3e83f855c00"}, - {file = "charset_normalizer-3.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:d4a48e5b3c2a489fae013b7589308a40146ee081f6f509e047e0e096084ceca1"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46"}, - {file = 
"charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464"}, - {file = 
"charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6"}, - {file = "charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110"}, - {file = "charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f"}, - {file = "charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = 
"sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = 
"sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = "sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c"}, - {file = "charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e5f4d355f0a2b1a31bc3edec6795b46324349c9cb25eed068049e4f472fb4259"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16d971e29578a5e97d7117866d15889a4a07befe0e87e703ed63cd90cb348c01"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dca4bbc466a95ba9c0234ef56d7dd9509f63da22274589ebd4ed7f1f4d4c54e3"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e80c8378d8f3d83cd3164da1ad2df9e37a666cdde7b1cb2298ed0b558064be30"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:36836d6ff945a00b88ba1e4572d721e60b5b8c98c155d465f56ad19d68f23734"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-manylinux_2_31_armv7l.whl", hash = "sha256:bd9b23791fe793e4968dba0c447e12f78e425c59fc0e3b97f6450f4781f3ee60"}, - {file = 
"charset_normalizer-3.4.7-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:aef65cd602a6d0e0ff6f9930fcb1c8fec60dd2cfcb6facaf4bdb0e5873042db0"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:82b271f5137d07749f7bf32f70b17ab6eaabedd297e75dce75081a24f76eb545"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:1efde3cae86c8c273f1eb3b287be7d8499420cf2fe7585c41d370d3e790054a5"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:c593052c465475e64bbfe5dbd81680f64a67fdc752c56d7a0ae205dc8aeefe0f"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:af21eb4409a119e365397b2adbaca4c9ccab56543a65d5dbd9f920d6ac29f686"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:84c018e49c3bf790f9c2771c45e9313a08c2c2a6342b162cd650258b57817706"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dd915403e231e6b1809fe9b6d9fc55cf8fb5e02765ac625d9cd623342a7905d7"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-win32.whl", hash = "sha256:320ade88cfb846b8cd6b4ddf5ee9e80ee0c1f52401f2456b84ae1ae6a1a5f207"}, - {file = "charset_normalizer-3.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:1dc8b0ea451d6e69735094606991f32867807881400f808a106ee1d963c46a83"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:177a0ba5f0211d488e295aaf82707237e331c24788d8d76c96c5a41594723217"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e0d51f618228538a3e8f46bd246f87a6cd030565e015803691603f55e12afb5"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:14265bfe1f09498b9d8ec91e9ec9fa52775edf90fcbde092b25f4a33d444fea9"}, - {file = 
"charset_normalizer-3.4.7-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87fad7d9ba98c86bcb41b2dc8dbb326619be2562af1f8ff50776a39e55721c5a"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f22dec1690b584cea26fade98b2435c132c1b5f68e39f5a0b7627cd7ae31f1dc"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:d61f00a0869d77422d9b2aba989e2d24afa6ffd552af442e0e58de4f35ea6d00"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6370e8686f662e6a3941ee48ed4742317cafbe5707e36406e9df792cdb535776"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a6c5863edfbe888d9eff9c8b8087354e27618d9da76425c119293f11712a6319"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ed065083d0898c9d5b4bbec7b026fd755ff7454e6e8b73a67f8c744b13986e24"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2cd4a60d0e2fb04537162c62bbbb4182f53541fe0ede35cdf270a1c1e723cc42"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:813c0e0132266c08eb87469a642cb30aaff57c5f426255419572aaeceeaa7bf4"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:07d9e39b01743c3717745f4c530a6349eadbfa043c7577eef86c502c15df2c67"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c0f081d69a6e58272819b70288d3221a6ee64b98df852631c80f293514d3b274"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-win32.whl", hash = "sha256:8751d2787c9131302398b11e6c8068053dcb55d5a8964e114b6e196cf16cb366"}, - {file = "charset_normalizer-3.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:12a6fff75f6bc66711b73a2f0addfc4c8c15a20e805146a02d147a318962c444"}, - {file = 
"charset_normalizer-3.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:bb8cc7534f51d9a017b93e3e85b260924f909601c3df002bcdb58ddb4dc41a5c"}, - {file = "charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d"}, - {file = "charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5"}, + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] -name = "idna" -version = "3.13" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"ecoflow\"" +name = "coverage" +version = "7.13.5" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3"}, - {file = "idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242"}, + {file = "coverage-7.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0723d2c96324561b9aa76fb982406e11d93cdb388a7a7da2b16e04719cf7ca5"}, + {file = "coverage-7.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52f444e86475992506b32d4e5ca55c24fc88d73bcbda0e9745095b28ef4dc0cf"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:704de6328e3d612a8f6c07000a878ff38181ec3263d5a11da1db294fa6a9bdf8"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a1a6d79a14e1ec1832cabc833898636ad5f3754a678ef8bb4908515208bf84f4"}, + {file = 
"coverage-7.13.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79060214983769c7ba3f0cee10b54c97609dca4d478fa1aa32b914480fd5738d"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:356e76b46783a98c2a2fe81ec79df4883a1e62895ea952968fb253c114e7f930"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0cef0cdec915d11254a7f549c1170afecce708d30610c6abdded1f74e581666d"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dc022073d063b25a402454e5712ef9e007113e3a676b96c5f29b2bda29352f40"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9b74db26dfea4f4e50d48a4602207cd1e78be33182bc9cbf22da94f332f99878"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ad146744ca4fd09b50c482650e3c1b1f4dfa1d4792e0a04a369c7f23336f0400"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:c555b48be1853fe3997c11c4bd521cdd9a9612352de01fa4508f16ec341e6fe0"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7034b5c56a58ae5e85f23949d52c14aca2cfc6848a31764995b7de88f13a1ea0"}, + {file = "coverage-7.13.5-cp310-cp310-win32.whl", hash = "sha256:eb7fdf1ef130660e7415e0253a01a7d5a88c9c4d158bcf75cbbd922fd65a5b58"}, + {file = "coverage-7.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:3e1bb5f6c78feeb1be3475789b14a0f0a5b47d505bfc7267126ccbd50289999e"}, + {file = "coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d"}, + {file = "coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8"}, + {file = "coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf"}, + {file = "coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9"}, + {file = "coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028"}, + {file = 
"coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01"}, + {file = "coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c"}, + {file = 
"coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf"}, + {file = "coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810"}, + {file = "coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de"}, + {file = "coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1"}, + {file = "coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", 
hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17"}, + {file = "coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85"}, + {file = "coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b"}, + {file = "coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664"}, + {file = "coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d"}, + {file = "coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2"}, + {file = "coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a"}, + {file = "coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819"}, + {file = "coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911"}, + {file = "coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f"}, + {file = "coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510"}, + {file = 
"coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0"}, + {file = "coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc"}, + {file = "coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633"}, + {file = "coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8"}, + {file = "coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b"}, + {file = "coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a"}, + {file = "coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215"}, + {file = "coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = 
"sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43"}, + {file = "coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45"}, + {file = "coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61"}, + {file = "coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179"}, ] [package.extras] -all = ["mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "influxdb3-python" @@ -188,25 +166,67 @@ reactivex = ">=4.0.4" urllib3 = ">=1.26.0" [package.extras] -async = ["aiocsv (>=1.2.2)", "aiohttp (>=3.8.1)"] -ciso = ["ciso8601 (>=2.1.1)"] -extra = ["numpy", "pandas (>=1.0.0)"] -test = ["aioresponses (>=0.7.3)", "coverage (>=4.0.3)", "flake8 (>=5.0.3)", "httpretty (==1.0.5)", "jinja2 (==3.1.3)", "nose (>=1.3.7)", "pluggy (>=0.3.1)", "psutil (>=5.6.3)", "py (>=1.4.31)", "pytest (>=5.0.0)", "pytest-cov (>=3.0.0)", "pytest-timeout (>=2.1.0)", "randomize (>=0.13)", "sphinx (==1.8.5)", "sphinx-rtd-theme"] +dataframe = ["pandas", "polars"] +pandas = ["pandas"] +polars = ["polars"] +test = ["cryptography (>=3.4.0)", "h2 (>=4.0.0,<5.0.0)", "pytest", "pytest-cov", "pytest-httpserver"] [[package]] -name = "prometheus-client" -version = "0.11.0" -description = "Python client for the Prometheus monitoring system." 
-optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["main"] -markers = "extra == \"prometheus\"" +name = "iniconfig" +version = "2.3.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "prometheus_client-0.11.0-py2.py3-none-any.whl", hash = "sha256:b014bc76815eb1399da8ce5fc84b7717a3e63652b0c0f8804092c9363acab1b2"}, - {file = "prometheus_client-0.11.0.tar.gz", hash = "sha256:3a8baade6cb80bcfe43297e33e7623f3118d660d41387593758e2fb1ea173a86"}, + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, +] + +[[package]] +name = "packaging" +version = "26.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-26.2-py3-none-any.whl", hash = "sha256:5fc45236b9446107ff2415ce77c807cee2862cb6fac22b8a73826d0693b0980e"}, + {file = "packaging-26.2.tar.gz", hash = "sha256:ff452ff5a3e828ce110190feff1178bb1f2ea2281fa2075aadb987c2fb221661"}, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "prometheus-client" +version = "0.25.0" +description = "Python client for the Prometheus monitoring system." 
+optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"prometheus\"" +files = [ + {file = "prometheus_client-0.25.0-py3-none-any.whl", hash = "sha256:d5aec89e349a6ec230805d0df882f3807f74fd6c1a2fa86864e3c2279059fed1"}, + {file = "prometheus_client-0.25.0.tar.gz", hash = "sha256:5e373b75c31afb3c86f1a52fa1ad470c9aace18082d39ec0d2f918d11cc9ba28"}, +] + +[package.extras] +aiohttp = ["aiohttp"] +django = ["django"] twisted = ["twisted"] [[package]] @@ -270,6 +290,21 @@ files = [ {file = "pyarrow-24.0.0.tar.gz", hash = "sha256:85fe721a14dd823aca09127acbb06c3ca723efbd436c004f16bca601b04dcc83"}, ] +[[package]] +name = "pygments" +version = "2.20.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176"}, + {file = "pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pyserial" version = "3.5" @@ -285,6 +320,48 @@ files = [ [package.extras] cp2110 = ["hidapi"] +[[package]] +name = "pytest" +version = "9.0.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9"}, + {file = "pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c"}, +] + +[package.dependencies] +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +iniconfig = ">=1.0.1" +packaging = ">=22" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", 
"xmlschema"] + +[[package]] +name = "pytest-cov" +version = "7.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678"}, + {file = "pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2"}, +] + +[package.dependencies] +coverage = {version = ">=7.10.6", extras = ["toml"]} +pluggy = ">=1.2" +pytest = ">=7" + +[package.extras] +testing = ["process-tests", "pytest-xdist", "virtualenv"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -317,22 +394,6 @@ files = [ [package.dependencies] typing-extensions = ">=4.1.1,<5.0.0" -[[package]] -name = "setuptools" -version = "69.2.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = true -python-versions = ">=3.8" -files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", 
"pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.17.0" @@ -364,7 +425,9 @@ name = "urllib3" version = "2.6.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"influxdb\"" files = [ {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, @@ -377,10 +440,10 @@ socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] [extras] -influxdb = ["influxdb-client"] +influxdb = ["influxdb3-python", "typing-extensions"] prometheus = ["prometheus-client"] [metadata] lock-version = "2.1" python-versions = ">=3.12,<4.0" -content-hash = "2b55be52f78064d5a751d5cbdaa0ef8cc00ca0dd6b8d430f02325a9f85eb0842" +content-hash = "365e8ad492e514bbf9e48ed443fa5bd0dcd86f8970cbe1c7b654631f88866c3d" diff --git a/pyproject.toml b/pyproject.toml index 0787369..0dd1da5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "bmspy" -version = "2.0" +version = "2.1" description = "bmspy is a tool to get information from a xiaoxiang-type BMS system" authors = ["Timothy Allen "] license = "CC BY-NC-SA 4.0" @@ -8,19 +8,29 @@ readme = "README.md" [tool.poetry.dependencies] python = ">=3.12,<4.0" -pyserial = "^3.5" -influxdb3-python = {version = "^0.19.0", optional = true, extras = ["influxdb"]} +pyserial = ">=3.5" +influxdb3-python = {version = ">=0.19.0", optional = true, extras = 
["influxdb"]} typing-extensions = {version = ">=4.13", optional = true, extras = ["influxdb"]} -prometheus-client = {version = "^0.11.0", optional = true, extras = ["prometheus"]} +prometheus-client = {version = ">=0.11.0", optional = true, extras = ["prometheus"]} [tool.poetry.extras] influxdb = ["influxdb3-python", "typing-extensions"] prometheus = ["prometheus-client"] +[tool.poetry.group.dev.dependencies] +pytest = ">=8.0" +pytest-cov = ">=7.1.0" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "if __name__ == .__main__.:", +] + [tool.poetry.scripts] bmspy = "bmspy:main" bmspy-server = "bmspy.server:main" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..b43f871 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,78 @@ +import pytest +from bmspy.classes import BMSScalarField, BMSMultiField, BMSInfoField +from bmspy.jbd_bms import JBDBMS + + +# --------------------------------------------------------------------------- +# Raw JBD BMS wire-format fixtures +# --------------------------------------------------------------------------- + +# 03 response: 31 bytes, data_len=25, 4 cells, 1 NTC sensor +# Encodes: 52.00V, 0.00A, 100.00Ah remaining, 100.00Ah nominal, +# 10 cycles, 2023-01-15, no protection faults, rsoc=95, +# charging+discharging MOSFET on, 4 cells, 25.0°C +VALID_03_RESPONSE = bytearray([ + 0xDD, 0xA5, 0x00, 0x19, # start, r/w, status OK, data_len=25 + 0x14, 0x50, # total voltage: 5200 * 0.01 = 52.00 V + 0x00, 0x00, # current: 0 + 0x27, 0x10, # remaining cap: 10000 * 0.01 = 100.00 Ah + 0x27, 0x10, # nominal cap: 10000 * 0.01 = 100.00 Ah + 0x00, 0x0A, # charge cycles: 10 + 0x2E, 0x2F, # manufacture date: 0x2E2F → 2023-01-15 + 0x00, 0x00, # balance state high (no balancing) + 0x00, 0x00, # balance state low (no balancing) + 
0x00, 0x00, # protection state (no faults) + 0x00, # software version + 0x5F, # rsoc: 95 * 0.01 = 0.95 + 0x03, # control status: charging(bit0) + discharging(bit1) + 0x04, # cell count: 4 + 0x01, # NTC sensor count: 1 + 0x0B, 0xA5, # NTC 1: (0x0BA5=2981 - 2731) * 0.1 = 25.0 °C + 0xFD, 0x97, # checksum +]) + +# 04 response: 14 bytes, 4 cells +# Cell voltages: 3.600 V, 3.601 V, 3.599 V, 3.598 V +VALID_04_RESPONSE = bytearray([ + 0xDD, 0xA5, 0x00, 0x08, # start, r/w, status OK, data_len=8 + 0x0E, 0x10, # cell 1: 3600 * 0.001 = 3.600 V + 0x0E, 0x11, # cell 2: 3601 * 0.001 = 3.601 V + 0x0E, 0x0F, # cell 3: 3599 * 0.001 = 3.599 V + 0x0E, 0x0E, # cell 4: 3598 * 0.001 = 3.598 V + 0xFF, 0x82, # checksum +]) + + +@pytest.fixture +def valid_03_response() -> bytearray: + return bytearray(VALID_03_RESPONSE) + + +@pytest.fixture +def valid_04_response() -> bytearray: + return bytearray(VALID_04_RESPONSE) + + +@pytest.fixture +def populated_jbdbms() -> JBDBMS: + bms = JBDBMS() + bms.bms_voltage_total_volts = BMSScalarField( + help="Total Voltage", raw_value=52.0, value="52.00", units="V" + ) + bms.bms_current_amps = BMSScalarField( + help="Current", raw_value=0.0, value="0.00", units="A" + ) + bms.bms_capacity_charge_ratio = BMSScalarField( + help="Percent Charge", raw_value=0.95, value="0.95", units="‰" + ) + bms.bms_manufacture_date = BMSInfoField( + help="Date of Manufacture", info="2023-01-15" + ) + bms.bms_temperature_celcius = BMSMultiField( + help="Temperature", + label="sensor", + raw_values={1: 25.0}, + values={1: "25.00"}, + units="°C", + ) + return bms diff --git a/tests/test_classes.py b/tests/test_classes.py new file mode 100644 index 0000000..2e1c3ae --- /dev/null +++ b/tests/test_classes.py @@ -0,0 +1,162 @@ +import pytest +from bmspy.classes import ( + BMSScalarField, + BMSMultiField, + BMSInfoField, + UPS, + _field_from_dict, + _UPSSnapshot, +) + + +# --------------------------------------------------------------------------- +# BMSScalarField +# 
--------------------------------------------------------------------------- + +class TestBMSScalarField: + def test_get_existing_attribute(self): + f = BMSScalarField(help="Voltage", raw_value=52.0, value="52.00", units="V") + assert f.get("help") == "Voltage" + assert f.get("raw_value") == 52.0 + assert f.get("units") == "V" + + def test_get_missing_attribute_returns_default(self): + f = BMSScalarField(help="Voltage", raw_value=52.0, value="52.00") + assert f.get("nonexistent") is None + assert f.get("nonexistent", 42) == 42 + + def test_units_defaults_to_none(self): + f = BMSScalarField(help="Cycles", raw_value=10, value="10") + assert f.units is None + assert f.get("units") is None + + def test_bool_raw_value(self): + f = BMSScalarField(help="Flag", raw_value=True, value="1") + assert f.raw_value is True + + +# --------------------------------------------------------------------------- +# BMSMultiField +# --------------------------------------------------------------------------- + +class TestBMSMultiField: + def test_get_existing_attribute(self): + f = BMSMultiField( + help="Cell Voltages", + label="cell", + raw_values={1: 3.6, 2: 3.61}, + values={1: "3.600", 2: "3.610"}, + units="V", + ) + assert f.get("label") == "cell" + assert f.get("raw_values") == {1: 3.6, 2: 3.61} + assert f.get("units") == "V" + + def test_get_missing_returns_default(self): + f = BMSMultiField(help="h", label="l", raw_values={}, values={}) + assert f.get("missing") is None + assert f.get("missing", "fallback") == "fallback" + + +# --------------------------------------------------------------------------- +# BMSInfoField +# --------------------------------------------------------------------------- + +class TestBMSInfoField: + def test_get_existing_attribute(self): + f = BMSInfoField(help="Date of Manufacture", info="2023-01-15") + assert f.get("help") == "Date of Manufacture" + assert f.get("info") == "2023-01-15" + + def test_get_missing_returns_none(self): + f = 
BMSInfoField(help="h", info="i") + assert f.get("units") is None + + +# --------------------------------------------------------------------------- +# _field_from_dict +# --------------------------------------------------------------------------- + +class TestFieldFromDict: + def test_scalar_field(self): + d = {"help": "Voltage", "raw_value": 52.0, "value": "52.00", "units": "V"} + f = _field_from_dict(d) + assert isinstance(f, BMSScalarField) + assert f.raw_value == 52.0 + assert f.units == "V" + + def test_multi_field(self): + d = { + "help": "Cell Voltages", + "label": "cell", + "raw_values": {1: 3.6}, + "values": {1: "3.600"}, + "units": "V", + } + f = _field_from_dict(d) + assert isinstance(f, BMSMultiField) + assert f.label == "cell" + assert f.raw_values == {1: 3.6} + + def test_info_field(self): + d = {"help": "Date of Manufacture", "info": "2023-01-15"} + f = _field_from_dict(d) + assert isinstance(f, BMSInfoField) + assert f.info == "2023-01-15" + + + +# --------------------------------------------------------------------------- +# UPS / _UPSSnapshot +# --------------------------------------------------------------------------- + +class TestUPS: + def _snapshot(self): + return UPS.from_dict({ + "bms_voltage": {"help": "Voltage", "raw_value": 52.0, "value": "52.00", "units": "V"}, + "bms_date": {"help": "Date", "info": "2023-01-15"}, + "bms_temps": { + "help": "Temperature", + "label": "sensor", + "raw_values": {1: 25.0}, + "values": {1: "25.00"}, + "units": "°C", + }, + }) + + def test_from_dict_returns_ups_instance(self): + assert isinstance(self._snapshot(), UPS) + + def test_from_dict_scalar_field(self): + ups = self._snapshot() + items = dict(ups.items()) + assert isinstance(items["bms_voltage"], BMSScalarField) + assert items["bms_voltage"].raw_value == 52.0 + + def test_from_dict_info_field(self): + ups = self._snapshot() + items = dict(ups.items()) + assert isinstance(items["bms_date"], BMSInfoField) + assert items["bms_date"].info == "2023-01-15" 
+ + def test_from_dict_multi_field(self): + ups = self._snapshot() + items = dict(ups.items()) + assert isinstance(items["bms_temps"], BMSMultiField) + assert items["bms_temps"].raw_values == {1: 25.0} + + def test_bool_true_when_populated(self): + assert bool(self._snapshot()) is True + + def test_bool_false_when_empty(self): + empty = UPS.from_dict({}) + assert bool(empty) is False + + def test_items_yields_all_fields(self): + ups = self._snapshot() + keys = [k for k, _ in ups.items()] + assert set(keys) == {"bms_voltage", "bms_date", "bms_temps"} + + def test_items_empty_snapshot(self): + ups = UPS.from_dict({}) + assert list(ups.items()) == [] diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 0000000..0993636 --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,368 @@ +from unittest.mock import patch, MagicMock + +import pytest + +from bmspy.classes import BMSInfoField, BMSMultiField, BMSScalarField, UPS +from bmspy.client import read_data + + +MOCK_RESPONSE = { + "myups": { + "bms_voltage_total_volts": { + "help": "Total Voltage", + "raw_value": 52.0, + "value": "52.00", + "units": "V", + }, + "bms_manufacture_date": { + "help": "Date of Manufacture", + "info": "2023-01-15", + }, + "bms_temperature_celcius": { + "help": "Temperature", + "label": "sensor", + "raw_values": {1: 25.0}, + "values": {1: "25.00"}, + "units": "°C", + }, + }, + "officeups": { + "bms_voltage_total_volts": { + "help": "Total Voltage", + "raw_value": 48.5, + "value": "48.50", + "units": "V", + }, + }, +} + + +class TestReadData: + def _call(self, response=None): + with patch("bmspy.client.socket_comms", return_value=response or MOCK_RESPONSE): + return read_data("/fake/socket", "test") + + def test_returns_dict_of_ups(self): + result = self._call() + assert isinstance(result, dict) + for v in result.values(): + assert isinstance(v, UPS) + + def test_all_devices_present(self): + result = self._call() + assert set(result.keys()) == {"myups", "officeups"} + + 
def test_scalar_field_deserialized(self): + result = self._call() + items = dict(result["myups"].items()) + f = items["bms_voltage_total_volts"] + assert isinstance(f, BMSScalarField) + assert f.raw_value == 52.0 + assert f.units == "V" + + def test_info_field_deserialized(self): + result = self._call() + items = dict(result["myups"].items()) + f = items["bms_manufacture_date"] + assert isinstance(f, BMSInfoField) + assert f.info == "2023-01-15" + + def test_multi_field_deserialized(self): + result = self._call() + items = dict(result["myups"].items()) + f = items["bms_temperature_celcius"] + assert isinstance(f, BMSMultiField) + assert f.raw_values == {1: 25.0} + assert f.label == "sensor" + + def test_ups_is_truthy_when_populated(self): + result = self._call() + assert bool(result["myups"]) is True + + def test_empty_device_response(self): + result = self._call({"emptyups": {}}) + assert isinstance(result["emptyups"], UPS) + assert bool(result["emptyups"]) is False + + def test_ups_filter_forwarded(self): + with patch("bmspy.client.socket_comms") as mock_comms: + mock_comms.return_value = {"myups": MOCK_RESPONSE["myups"]} + read_data("/fake/socket", "test", ups="myups") + call_args = mock_comms.call_args[0][1] + assert call_args.get("ups") == "myups" + + def test_no_ups_filter_not_in_request(self): + with patch("bmspy.client.socket_comms") as mock_comms: + mock_comms.return_value = {} + read_data("/fake/socket", "test") + call_args = mock_comms.call_args[0][1] + assert "ups" not in call_args + + +# --------------------------------------------------------------------------- +# handle_registration +# --------------------------------------------------------------------------- + +import bmspy.client as _client_mod + + +@pytest.fixture(autouse=True) +def _reset_is_registered(): + _client_mod.is_registered = False + yield + _client_mod.is_registered = False + + +class TestHandleRegistration: + def test_register_sets_flag(self): + with patch("bmspy.client.socket_comms", 
return_value={"status": "REGISTERED", "client": "test"}): + from bmspy.client import handle_registration + handle_registration("/fake/socket", "test") + assert _client_mod.is_registered is True + + def test_register_returns_response(self): + response = {"status": "REGISTERED", "client": "test"} + with patch("bmspy.client.socket_comms", return_value=response): + from bmspy.client import handle_registration + result = handle_registration("/fake/socket", "test") + assert result == response + + def test_register_sends_register_command(self): + with patch("bmspy.client.socket_comms") as mock_comms: + mock_comms.return_value = {"status": "REGISTERED", "client": "test"} + from bmspy.client import handle_registration + handle_registration("/fake/socket", "test") + sent = mock_comms.call_args[0][1] + assert sent["command"] == "REGISTER" + assert sent["client"] == "test" + + def test_deregister_clears_flag(self): + _client_mod.is_registered = True + with patch("bmspy.client.socket_comms", return_value={"status": "DEREGISTERED", "client": "test"}): + from bmspy.client import handle_registration + handle_registration("/fake/socket", "test") + assert _client_mod.is_registered is False + + def test_deregister_sends_deregister_command(self): + _client_mod.is_registered = True + with patch("bmspy.client.socket_comms") as mock_comms: + mock_comms.return_value = {"status": "DEREGISTERED", "client": "test"} + from bmspy.client import handle_registration + handle_registration("/fake/socket", "test") + sent = mock_comms.call_args[0][1] + assert sent["command"] == "DEREGISTER" + + def test_socket_error_does_not_raise(self): + with patch("bmspy.client.socket_comms", side_effect=Exception("connection refused")): + from bmspy.client import handle_registration + result = handle_registration("/fake/socket", "test") + assert result == {} + + def test_invalid_status_does_not_raise(self): + with patch("bmspy.client.socket_comms", return_value={"status": "UNKNOWN"}): + from bmspy.client import 
handle_registration + handle_registration("/fake/socket", "test") + + def test_debug_3_on_deregister_failure(self, capsys): + """debug=3 path in exception handler when is_registered=True.""" + _client_mod.is_registered = True + with patch("bmspy.client.socket_comms", side_effect=Exception("fail")): + from bmspy.client import handle_registration + handle_registration("/fake/socket", "test", debug=3) + captured = capsys.readouterr() + assert "deregister" in captured.out.lower() or "fail" in captured.out + + +# --------------------------------------------------------------------------- +# socket_comms — real Unix socket +# --------------------------------------------------------------------------- + +import json +import socket +import struct +import threading + + +class TestSocketComms: + """Test socket_comms with a real Unix socket server running in a thread.""" + + def _run_server(self, sock_path: str, response: dict, ready_event: threading.Event): + """Minimal server: accept one connection, read framed request, send framed response.""" + srv = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + srv.bind(sock_path) + srv.listen(1) + ready_event.set() + conn, _ = srv.accept() + try: + # read length + raw_len = conn.recv(4) + if len(raw_len) < 4: + return # Client disconnected early + length = struct.unpack("!I", raw_len)[0] + data = conn.recv(length) + # send response + payload = json.dumps(response).encode() + conn.sendall(struct.pack("!I", len(payload)) + payload) + finally: + conn.close() + srv.close() + import os + try: + os.unlink(sock_path) + except FileNotFoundError: + pass + + def test_returns_response_dict(self, tmp_path): + from bmspy.client import socket_comms + sock_path = str(tmp_path / "test.sock") + response = {"status": "REGISTERED", "client": "test"} + ready = threading.Event() + t = threading.Thread( + target=self._run_server, args=(sock_path, response, ready), daemon=True + ) + t.start() + ready.wait(timeout=2) + result = socket_comms(sock_path, 
{"command": "REGISTER", "client": "test"}) + t.join(timeout=2) + assert result == response + + def test_debug_3_does_not_raise(self, tmp_path, capsys): + from bmspy.client import socket_comms + sock_path = str(tmp_path / "test_dbg.sock") + response = {"status": "OK"} + ready = threading.Event() + t = threading.Thread( + target=self._run_server, args=(sock_path, response, ready), daemon=True + ) + t.start() + ready.wait(timeout=2) + result = socket_comms(sock_path, {"command": "GET", "client": "test"}, debug=3) + t.join(timeout=2) + assert result == response + + def test_debug_4_does_not_raise(self, tmp_path, capsys): + from bmspy.client import socket_comms + sock_path = str(tmp_path / "test_dbg4.sock") + response = {"status": "OK"} + ready = threading.Event() + t = threading.Thread( + target=self._run_server, args=(sock_path, response, ready), daemon=True + ) + t.start() + ready.wait(timeout=2) + result = socket_comms(sock_path, {"command": "GET", "client": "test"}, debug=4) + t.join(timeout=2) + assert result == response + + def test_debug_5_does_not_raise(self, tmp_path, capsys): + from bmspy.client import socket_comms + sock_path = str(tmp_path / "test_dbg5.sock") + response = {"status": "OK"} + ready = threading.Event() + t = threading.Thread( + target=self._run_server, args=(sock_path, response, ready), daemon=True + ) + t.start() + ready.wait(timeout=2) + result = socket_comms(sock_path, {"command": "GET", "client": "test"}, debug=5) + t.join(timeout=2) + assert result == response + + def test_connection_refused_does_not_raise(self, tmp_path, capsys): + """socket_comms gracefully handles ENOENT (no server).""" + from bmspy.client import socket_comms + import sys + sock_path = str(tmp_path / "nonexistent.sock") + # socket_comms calls sys.exit(1) on encode errors, but connection failures + # just print and continue (then fail on recv). We expect SystemExit. 
+ with pytest.raises((SystemExit, OSError, Exception)): + socket_comms(sock_path, {"command": "GET", "client": "test"}) + + def _run_bad_response_server(self, sock_path: str, bad_payload: bytes, ready_event: threading.Event): + """Server that sends a bad (non-JSON) response.""" + srv = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + srv.bind(sock_path) + srv.listen(1) + ready_event.set() + conn, _ = srv.accept() + try: + # read request + raw_len = conn.recv(4) + length = struct.unpack("!I", raw_len)[0] + conn.recv(length) + # send bad response + conn.sendall(struct.pack("!I", len(bad_payload)) + bad_payload) + finally: + conn.close() + srv.close() + import os + try: + os.unlink(sock_path) + except FileNotFoundError: + pass + + def test_invalid_json_response_exits(self, tmp_path, capsys): + """socket_comms calls sys.exit(1) on invalid JSON response.""" + from bmspy.client import socket_comms + sock_path = str(tmp_path / "bad_json.sock") + bad_payload = b"not json!!!" + ready = threading.Event() + t = threading.Thread( + target=self._run_bad_response_server, + args=(sock_path, bad_payload, ready), + daemon=True, + ) + t.start() + ready.wait(timeout=2) + with pytest.raises(SystemExit): + socket_comms(sock_path, {"command": "GET", "client": "test"}) + t.join(timeout=2) + + def test_json_encode_failure_exits(self, tmp_path, capsys): + """socket_comms calls sys.exit(1) when json.dumps raises.""" + from bmspy.client import socket_comms + sock_path = str(tmp_path / "encode_err.sock") + # Create a minimal server so the socket connect succeeds + ready = threading.Event() + response = {"status": "OK"} + t = threading.Thread( + target=self._run_server, args=(sock_path, response, ready), daemon=True + ) + t.start() + ready.wait(timeout=2) + with patch("bmspy.client.json.dumps", side_effect=TypeError("not serializable")): + with pytest.raises(SystemExit): + socket_comms(sock_path, {"command": "GET", "client": "test"}) + t.join(timeout=2) + + def 
test_non_enoent_socket_error_logs_message(self, tmp_path, capsys): + """socket_comms logs a different message for non-ENOENT socket errors.""" + from bmspy.client import socket_comms + import socket as _socket + import errno + + sock_path = str(tmp_path / "test_err.sock") + # Make connect raise a socket.error with errno != 2 + err = _socket.error("connection refused") + err.errno = errno.ECONNREFUSED # not 2 (ENOENT) + + with patch("bmspy.client.socket.socket") as mock_sock_cls: + mock_sock = MagicMock() + mock_sock_cls.return_value = mock_sock + mock_sock.connect.side_effect = err + # after connect fails, sendall will also fail, triggering sys.exit + with pytest.raises((SystemExit, Exception)): + socket_comms(sock_path, {"command": "GET", "client": "test"}) + captured = capsys.readouterr() + assert "socket client" in captured.out or "connection refused" in captured.out.lower() + + +# --------------------------------------------------------------------------- +# read_data — socket_comms returns None path +# --------------------------------------------------------------------------- + +class TestReadDataNone: + def test_raises_runtime_error_when_none(self): + with patch("bmspy.client.socket_comms", return_value=None): + with pytest.raises(RuntimeError, match="No data received"): + read_data("/fake/socket", "test") diff --git a/tests/test_influxdb.py b/tests/test_influxdb.py new file mode 100644 index 0000000..6a76e8c --- /dev/null +++ b/tests/test_influxdb.py @@ -0,0 +1,373 @@ +import pytest +pytest.importorskip("influxdb_client_3", reason="influxdb3-python not installed") + +import pytest +from unittest.mock import patch, MagicMock +from bmspy.classes import UPS +from bmspy.influxdb import influxdb_create_snapshot + + +def _ups(*field_dicts: dict) -> dict[str, UPS]: + """Helper: build a {ups_name: UPS} map from a list of field dicts.""" + fields = {} + for d in field_dicts: + fields[d.pop("_name")] = d + return {"testups": UPS.from_dict(fields)} + + +class 
TestInfluxdbCreateSnapshot: + def test_scalar_field_produces_one_point(self): + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "Voltage", "raw_value": 52.0, "value": "52.00", "units": "V"}, + })} + points = influxdb_create_snapshot(ups_data) + assert len(points) == 1 + + def test_multi_field_produces_one_point_per_index(self): + ups_data = {"myups": UPS.from_dict({ + "bms_cells": { + "help": "Cell Voltages", + "label": "cell", + "raw_values": {1: 3.6, 2: 3.61, 3: 3.59}, + "values": {1: "3.600", 2: "3.610", 3: "3.590"}, + "units": "V", + }, + })} + points = influxdb_create_snapshot(ups_data) + assert len(points) == 3 + + def test_info_field_produces_one_point(self): + ups_data = {"myups": UPS.from_dict({ + "bms_date": {"help": "Manufacture Date", "info": "2023-01-15"}, + })} + points = influxdb_create_snapshot(ups_data) + assert len(points) == 1 + + def test_mixed_fields_sum_correctly(self): + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "Voltage", "raw_value": 52.0, "value": "52.00", "units": "V"}, + "bms_date": {"help": "Date", "info": "2023-01-15"}, + "bms_cells": { + "help": "Cells", + "label": "cell", + "raw_values": {1: 3.6, 2: 3.61}, + "values": {1: "3.600", 2: "3.610"}, + "units": "V", + }, + })} + # 1 scalar + 1 info + 2 multi = 4 points + points = influxdb_create_snapshot(ups_data) + assert len(points) == 4 + + def test_multiple_ups_devices(self): + field = {"help": "Voltage", "raw_value": 52.0, "value": "52.00", "units": "V"} + ups_data = { + "ups1": UPS.from_dict({"bms_voltage": dict(field)}), + "ups2": UPS.from_dict({"bms_voltage": dict(field)}), + } + points = influxdb_create_snapshot(ups_data) + assert len(points) == 2 + + def test_empty_ups_produces_no_points(self): + ups_data = {"myups": UPS.from_dict({})} + points = influxdb_create_snapshot(ups_data) + assert points == [] + + def test_point_measurement_name(self): + ups_data = {"myups": UPS.from_dict({ + "bms_voltage_total_volts": { + "help": "Voltage", 
"raw_value": 52.0, "value": "52.00", "units": "V" + }, + })} + points = influxdb_create_snapshot(ups_data) + assert len(points) == 1 + # Point measurement name should match the field key + assert points[0]._name == "bms_voltage_total_volts" + + def test_debug_mode_does_not_raise(self): + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"}, + })} + # debug=3 triggers the debugger() calls — should not raise + influxdb_create_snapshot(ups_data, debug=3) + + def test_scalar_point_has_ups_tag(self): + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"}, + })} + points = influxdb_create_snapshot(ups_data) + assert points[0]._tags.get("ups") == "myups" + + def test_multi_field_points_have_label_tag(self): + ups_data = {"myups": UPS.from_dict({ + "bms_cells": { + "help": "Cells", + "label": "cell", + "raw_values": {1: 3.6, 2: 3.61}, + "values": {1: "3.600", 2: "3.610"}, + "units": "V", + }, + })} + points = influxdb_create_snapshot(ups_data) + assert all("cell" in p._tags for p in points) + + def test_info_field_point_value(self): + ups_data = {"myups": UPS.from_dict({ + "bms_date": {"help": "Manufacture Date", "info": "2023-01-15"}, + })} + points = influxdb_create_snapshot(ups_data) + assert points[0]._fields.get("value") == "2023-01-15" + + +# --------------------------------------------------------------------------- +# influxdb_write_snapshot +# --------------------------------------------------------------------------- + +class TestInfluxdbWriteSnapshot: + def test_write_called_once(self): + from unittest.mock import MagicMock + from bmspy.influxdb import influxdb_write_snapshot + mock_client = MagicMock() + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"}, + })} + influxdb_write_snapshot(mock_client, "test_bucket", ups_data) + mock_client.write.assert_called_once() 
+ + def test_write_uses_correct_database(self): + from unittest.mock import MagicMock + from bmspy.influxdb import influxdb_write_snapshot + mock_client = MagicMock() + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"}, + })} + influxdb_write_snapshot(mock_client, "mybucket", ups_data) + call_kwargs = mock_client.write.call_args[1] + assert call_kwargs.get("database") == "mybucket" + + def test_write_exception_does_not_raise(self): + from unittest.mock import MagicMock + from bmspy.influxdb import influxdb_write_snapshot + mock_client = MagicMock() + mock_client.write.side_effect = Exception("connection failed") + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"}, + })} + influxdb_write_snapshot(mock_client, "mybucket", ups_data) + + def test_empty_ups_writes_no_points(self): + from unittest.mock import MagicMock + from bmspy.influxdb import influxdb_write_snapshot + mock_client = MagicMock() + influxdb_write_snapshot(mock_client, "mybucket", {"myups": UPS.from_dict({})}) + call_kwargs = mock_client.write.call_args[1] + assert call_kwargs.get("record") == [] + + +# --------------------------------------------------------------------------- +# influxdb_export (non-daemonized) +# --------------------------------------------------------------------------- + +class TestInfluxdbExport: + def test_single_write_when_not_daemonized(self): + from unittest.mock import MagicMock, patch + from bmspy.influxdb import influxdb_export + mock_instance = MagicMock() + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"}, + })} + with patch("bmspy.influxdb.InfluxDBClient3", return_value=mock_instance), \ + patch("bmspy.influxdb.client.read_data", return_value=ups_data): + influxdb_export( + bucket="test", + url="http://localhost", + org="org", + token="token", + daemonize=False, + ) 
+ mock_instance.write.assert_called_once() + + def test_client_closed_after_non_daemonized_run(self): + from unittest.mock import MagicMock, patch + from bmspy.influxdb import influxdb_export + mock_instance = MagicMock() + ups_data = {"myups": UPS.from_dict({})} + with patch("bmspy.influxdb.InfluxDBClient3", return_value=mock_instance), \ + patch("bmspy.influxdb.client.read_data", return_value=ups_data): + influxdb_export( + bucket="test", + url="http://localhost", + org="org", + token="token", + daemonize=False, + ) + mock_instance.close.assert_called() + + def test_env_vars_used_when_no_url(self, monkeypatch): + from unittest.mock import MagicMock, patch + from bmspy.influxdb import influxdb_export + monkeypatch.setenv("INFLUXDB_V2_URL", "http://envhost") + monkeypatch.setenv("INFLUXDB_V2_ORG", "envorg") + monkeypatch.setenv("INFLUXDB_V2_TOKEN", "envtoken") + mock_instance = MagicMock() + ups_data = {"myups": UPS.from_dict({})} + with patch("bmspy.influxdb.InfluxDBClient3", return_value=mock_instance) as mock_cls, \ + patch("bmspy.influxdb.client.read_data", return_value=ups_data): + influxdb_export(bucket="test", daemonize=False) + # Should have been called with env URL + call_kwargs = mock_cls.call_args[1] + assert call_kwargs.get("host") == "http://envhost" + + def test_daemonize_true_loops_until_exception(self): + from unittest.mock import MagicMock, patch + from bmspy.influxdb import influxdb_export + mock_instance = MagicMock() + ups_data = {"myups": UPS.from_dict({})} + call_count = 0 + + def _read_data(*args, **kwargs): + nonlocal call_count + call_count += 1 + if call_count >= 2: + raise StopIteration("stop") + return ups_data + + with patch("bmspy.influxdb.InfluxDBClient3", return_value=mock_instance), \ + patch("bmspy.influxdb.client.read_data", side_effect=_read_data), \ + patch("bmspy.influxdb.time.sleep"): + with pytest.raises(StopIteration): + influxdb_export( + bucket="test", + url="http://localhost", + org="org", + token="token", + 
daemonize=True, + ) + assert call_count >= 2 + + +# --------------------------------------------------------------------------- +# influx_shutdown +# --------------------------------------------------------------------------- + +class TestInfluxShutdown: + def test_none_is_no_op(self): + from bmspy.influxdb import influx_shutdown + # Should not raise + influx_shutdown(None) + + def test_calls_close_on_client(self): + from unittest.mock import MagicMock + from bmspy.influxdb import influx_shutdown + mock_client = MagicMock() + influx_shutdown(mock_client) + mock_client.close.assert_called_once() + + +# --------------------------------------------------------------------------- +# influxdb_write_snapshot debug coverage +# --------------------------------------------------------------------------- + +class TestInfluxdbWriteSnapshotDebug: + def test_debug_2_logs_messages(self, capsys): + from unittest.mock import MagicMock + from bmspy.influxdb import influxdb_write_snapshot + mock_client = MagicMock() + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"}, + })} + influxdb_write_snapshot(mock_client, "bucket", ups_data, debug=2) + captured = capsys.readouterr() + assert "snapshot" in captured.out.lower() + + +# --------------------------------------------------------------------------- +# influxdb_create_snapshot additional debug paths +# --------------------------------------------------------------------------- + +class TestInfluxdbCreateSnapshotDebug: + def test_debug_3_scalar_logs(self, capsys): + ups_data = {"myups": UPS.from_dict({ + "bms_voltage": {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"}, + })} + influxdb_create_snapshot(ups_data, debug=3) + captured = capsys.readouterr() + assert "value" in captured.out.lower() + + def test_debug_3_multi_logs(self, capsys): + ups_data = {"myups": UPS.from_dict({ + "bms_cells": { + "help": "Cells", + "label": "cell", + "raw_values": {1: 
3.6}, + "values": {1: "3.600"}, + "units": "V", + }, + })} + influxdb_create_snapshot(ups_data, debug=3) + captured = capsys.readouterr() + assert "labels" in captured.out.lower() + + def test_debug_3_info_logs(self, capsys): + ups_data = {"myups": UPS.from_dict({ + "bms_date": {"help": "Date", "info": "2023-01-15"}, + })} + influxdb_create_snapshot(ups_data, debug=3) + captured = capsys.readouterr() + assert "info" in captured.out.lower() + + +# --------------------------------------------------------------------------- +# influxdb main() +# --------------------------------------------------------------------------- + +class TestInfluxdbMain: + def test_main_missing_url_exits(self, monkeypatch): + from bmspy.influxdb import main + monkeypatch.delenv("INFLUXDB_V2_URL", raising=False) + monkeypatch.delenv("INFLUXDB_V2_ORG", raising=False) + monkeypatch.delenv("INFLUXDB_V2_TOKEN", raising=False) + with patch("sys.argv", ["bmspy-influxdb"]): + with pytest.raises(SystemExit): + main() + + def test_main_missing_org_exits(self, monkeypatch): + from bmspy.influxdb import main + monkeypatch.setenv("INFLUXDB_V2_URL", "http://host") + monkeypatch.delenv("INFLUXDB_V2_ORG", raising=False) + monkeypatch.delenv("INFLUXDB_V2_TOKEN", raising=False) + with patch("sys.argv", ["bmspy-influxdb"]): + with pytest.raises(SystemExit): + main() + + def test_main_missing_token_exits(self, monkeypatch): + from bmspy.influxdb import main + monkeypatch.setenv("INFLUXDB_V2_URL", "http://host") + monkeypatch.setenv("INFLUXDB_V2_ORG", "myorg") + monkeypatch.delenv("INFLUXDB_V2_TOKEN", raising=False) + with patch("sys.argv", ["bmspy-influxdb"]): + with pytest.raises(SystemExit): + main() + + def test_main_calls_influxdb_export(self, monkeypatch): + from unittest.mock import MagicMock, patch + from bmspy.influxdb import main + mock_export = MagicMock() + with patch("sys.argv", ["bmspy-influxdb", "--url", "http://host", "--org", "org", "--token", "tok"]), \ + 
patch("bmspy.influxdb.client.handle_registration"), \ + patch("bmspy.influxdb.influxdb_export", mock_export): + main() + mock_export.assert_called_once() + + def test_main_with_env_vars_calls_export(self, monkeypatch): + from unittest.mock import MagicMock, patch + from bmspy.influxdb import main + monkeypatch.setenv("INFLUXDB_V2_URL", "http://envhost") + monkeypatch.setenv("INFLUXDB_V2_ORG", "envorg") + monkeypatch.setenv("INFLUXDB_V2_TOKEN", "envtoken") + mock_export = MagicMock() + with patch("sys.argv", ["bmspy-influxdb"]), \ + patch("bmspy.influxdb.client.handle_registration"), \ + patch("bmspy.influxdb.influxdb_export", mock_export): + main() + mock_export.assert_called_once() diff --git a/tests/test_init.py b/tests/test_init.py new file mode 100644 index 0000000..71e38bb --- /dev/null +++ b/tests/test_init.py @@ -0,0 +1,177 @@ +import argparse +import sys +from unittest.mock import patch, MagicMock + +import pytest + +from bmspy import parse_args, main +from bmspy.classes import UPS + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def _make_ups_data(): + return {"testups": UPS.from_dict({ + "bms_voltage": {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"}, + "bms_date": {"help": "Date", "info": "2023-01-15"}, + })} + + +class TestParseArgs: + def _parse(self, args: list[str]): + with patch("sys.argv", ["bmspy"] + args): + return parse_args() + + def test_socket_default(self): + assert self._parse([]).socket == "/run/bmspy/bms" + + def test_socket_long(self): + assert self._parse(["--socket", "/tmp/test.sock"]).socket == "/tmp/test.sock" + + def test_socket_short(self): + assert self._parse(["-s", "/tmp/test.sock"]).socket == "/tmp/test.sock" + + def test_ups_default_is_none(self): + assert self._parse([]).ups is None + + def test_ups_filter(self): + assert self._parse(["--ups", "myups"]).ups == "myups" + + def 
test_json_default_false(self): + assert self._parse([]).report_json is False + + def test_json_long(self): + assert self._parse(["--json"]).report_json is True + + def test_json_short(self): + assert self._parse(["-j"]).report_json is True + + def test_print_default_true(self): + assert self._parse([]).report_print is True + + def test_prometheus_default_false(self): + assert self._parse([]).report_prometheus is False + + def test_prometheus_flag(self): + assert self._parse(["--prometheus"]).report_prometheus is True + + def test_influxdb_default_false(self): + assert self._parse([]).report_influxdb is False + + def test_influxdb_long(self): + assert self._parse(["--influxdb"]).report_influxdb is True + + def test_influxdb_short(self): + assert self._parse(["-i"]).report_influxdb is True + + def test_bucket_default(self): + assert self._parse([]).influx_bucket == "ups" + + def test_bucket_long(self): + assert self._parse(["--bucket", "mybucket"]).influx_bucket == "mybucket" + + def test_bucket_short(self): + assert self._parse(["-b", "mybucket"]).influx_bucket == "mybucket" + + def test_url_default_false(self): + assert self._parse([]).influx_url is False + + def test_url_long(self): + assert self._parse(["--url", "http://influx.example.com"]).influx_url == "http://influx.example.com" + + def test_org_long(self): + assert self._parse(["--org", "myorg"]).influx_org == "myorg" + + def test_token_long(self): + assert self._parse(["--token", "mytoken"]).influx_token == "mytoken" + + def test_verbose_default_zero(self): + assert self._parse([]).verbose == 0 + + def test_verbose_once(self): + assert self._parse(["-v"]).verbose == 1 + + def test_verbose_multiple(self): + assert self._parse(["-v", "-v", "-v"]).verbose == 3 + + def test_verbose_long(self): + assert self._parse(["--verbose"]).verbose == 1 + + +# --------------------------------------------------------------------------- +# main() +# --------------------------------------------------------------------------- 
+ +class TestMain: + def _run_main(self, args: list[str]): + with patch("sys.argv", ["bmspy"] + args): + main() + + def test_print_mode_default(self, capsys): + ups_data = _make_ups_data() + with patch("sys.argv", ["bmspy"]), \ + patch("bmspy.client.handle_registration"), \ + patch("bmspy.client.read_data", return_value=ups_data): + main() + captured = capsys.readouterr() + assert "testups" in captured.out + + def test_json_mode(self, capsys): + ups_data = _make_ups_data() + with patch("sys.argv", ["bmspy", "--json"]), \ + patch("bmspy.client.handle_registration"), \ + patch("bmspy.client.read_data", return_value=ups_data): + main() + captured = capsys.readouterr() + # JSON output should contain field names + assert "bms_voltage" in captured.out + + def test_keyboard_interrupt_is_caught(self, capsys): + with patch("sys.argv", ["bmspy"]), \ + patch("bmspy.client.handle_registration"), \ + patch("bmspy.client.read_data", side_effect=KeyboardInterrupt("stopped")): + main() # should not raise + + def test_prometheus_flag_calls_export(self): + import bmspy.prometheus as prom_mod + mock_export = MagicMock() + with patch("sys.argv", ["bmspy", "--prometheus"]), \ + patch.object(prom_mod, "prometheus_export", mock_export): + main() + mock_export.assert_called_once() + + def test_textfile_flag_calls_prometheus_export(self, tmp_path): + import bmspy.prometheus as prom_mod + filename = str(tmp_path / "metrics.prom") + mock_export = MagicMock() + with patch("sys.argv", ["bmspy", "--file", filename]), \ + patch.object(prom_mod, "prometheus_export", mock_export): + main() + mock_export.assert_called_once() + call_kwargs = mock_export.call_args[1] + assert call_kwargs.get("daemonize") is False + assert call_kwargs.get("filename") == filename + + def test_influxdb_flag_calls_export(self): + pytest.importorskip("influxdb_client_3", reason="influxdb3-python not installed") + import bmspy.influxdb as influx_mod + mock_export = MagicMock() + with patch("sys.argv", ["bmspy", 
"--influxdb", "--url", "http://influx", "--org", "org", "--token", "tok"]), \ + patch.object(influx_mod, "influxdb_export", mock_export): + main() + mock_export.assert_called_once() + + def test_influxdb_partial_args_exits(self, capsys): + """Providing only one of --url/--org/--token raises ArgumentTypeError (caught by KeyboardInterrupt handler).""" + with patch("sys.argv", ["bmspy", "--influxdb", "--url", "http://influx"]): + with pytest.raises(argparse.ArgumentTypeError): + main() + + def test_influxdb_partial_raises_argument_error(self): + """Two of three influx args causes ArgumentTypeError.""" + import argparse as _ap + with patch("sys.argv", ["bmspy", "--influxdb", "--url", "http://influx", "--org", "org"]): + with pytest.raises(_ap.ArgumentTypeError): + main() diff --git a/tests/test_jbd_bms.py b/tests/test_jbd_bms.py new file mode 100644 index 0000000..2d428c3 --- /dev/null +++ b/tests/test_jbd_bms.py @@ -0,0 +1,704 @@ +import pytest +from unittest.mock import MagicMock, patch +from bmspy.jbd_bms import ( + JBDBMS, + bytes_to_digits, + bytes_to_date, + convert_to_signed, + verify_checksum, + parse_03_response, + parse_04_response, + requestMessage, + serial_cleanup, + collect_data, +) +from bmspy.classes import BMSScalarField, BMSMultiField, BMSInfoField, UPS + + +# --------------------------------------------------------------------------- +# bytes_to_digits +# --------------------------------------------------------------------------- + +class TestBytesToDigits: + def test_zero(self): + assert bytes_to_digits(0x00, 0x00) == 0 + + def test_low_byte_only(self): + assert bytes_to_digits(0x00, 0x0A) == 10 + + def test_high_byte_only(self): + assert bytes_to_digits(0x01, 0x00) == 256 + + def test_combined(self): + assert bytes_to_digits(0x14, 0x50) == 5200 + + def test_max(self): + assert bytes_to_digits(0xFF, 0xFF) == 65535 + + +# --------------------------------------------------------------------------- +# bytes_to_date +# 
--------------------------------------------------------------------------- + +class TestBytesToDate: + def test_known_date(self): + # 0x2E2F = 11823; day=15, mon=1, year=2023 + assert bytes_to_date(0x2E, 0x2F) == "2023-01-15" + + def test_zero_encodes_epoch(self): + # day=0, mon=0, year=2000 + assert bytes_to_date(0x00, 0x00) == "2000-00-00" + + def test_day_field(self): + # Only day bits set: 0x001F → day=31, mon=0, year=2000 + assert bytes_to_date(0x00, 0x1F) == "2000-00-31" + + def test_month_field(self): + # month=12: bits [8:5] = 0b1100 = 12 → raw = 12 << 5 = 384 = 0x0180 + assert bytes_to_date(0x01, 0x80) == "2000-12-00" + + def test_year_field(self): + # year offset = 24 → value = 24 << 9 = 12288 = 0x3000 + assert bytes_to_date(0x30, 0x00) == "2024-00-00" + + +# --------------------------------------------------------------------------- +# convert_to_signed +# --------------------------------------------------------------------------- + +class TestConvertToSigned: + def test_zero(self): + assert convert_to_signed(0) == 0 + + def test_small_positive(self): + assert convert_to_signed(100) == 100 + + def test_below_threshold(self): + assert convert_to_signed(1023) == 1023 + + def test_at_threshold_maps_to_zero(self): + # 1024 → (1024-512) % 1024 - 512 = 0 + assert convert_to_signed(1024) == 0 + + def test_just_above_threshold_maps_to_positive(self): + assert convert_to_signed(1025) == 1 + + def test_maps_to_negative(self): + # 2047 → (2047-512)%1024 - 512 = 1535%1024 - 512 = 511-512 = -1 + assert convert_to_signed(2047) == -1 + + def test_maps_to_most_negative(self): + # 1536 → (1536-512)%1024 - 512 = 1024%1024 - 512 = -512 + assert convert_to_signed(1536) == -512 + + +# --------------------------------------------------------------------------- +# verify_checksum +# --------------------------------------------------------------------------- + +class TestVerifyChecksum: + def _make_checksum(self, data: bytes) -> bytes: + s = sum(data) + s = (s ^ 0xFFFF) + 1 + 
return bytes([s >> 8, s & 0xFF]) + + def test_correct_checksum(self): + data = bytes([0x1B, 0x14, 0x50, 0x00]) + chk = self._make_checksum(data) + assert verify_checksum(data, chk) is True + + def test_single_byte(self): + data = bytes([0x42]) + chk = self._make_checksum(data) + assert verify_checksum(data, chk) is True + + def test_wrong_checksum(self): + data = bytes([0x10, 0x20]) + assert verify_checksum(data, bytes([0x00, 0x00])) is False + + def test_off_by_one(self): + data = bytes([0x10, 0x20]) + chk = self._make_checksum(data) + bad = bytes([chk[0], chk[1] ^ 0x01]) + assert verify_checksum(data, bad) is False + + def test_empty_data(self): + # sum=0 → s = (0^0xFFFF)+1 = 65536, which can never equal a 2-byte chk + assert verify_checksum(bytes(), bytes([0xFF, 0xFF])) is False + + +# --------------------------------------------------------------------------- +# JBDBMS +# --------------------------------------------------------------------------- + +class TestJBDBMS: + def test_empty_is_falsy(self): + assert not JBDBMS() + + def test_populated_is_truthy(self, populated_jbdbms): + assert bool(populated_jbdbms) + + def test_items_skips_none_fields(self): + bms = JBDBMS() + bms.bms_voltage_total_volts = BMSScalarField( + help="Total Voltage", raw_value=52.0, value="52.00", units="V" + ) + keys = [k for k, _ in bms.items()] + assert keys == ["bms_voltage_total_volts"] + + def test_items_yields_all_populated_fields(self, populated_jbdbms): + keys = {k for k, _ in populated_jbdbms.items()} + assert "bms_voltage_total_volts" in keys + assert "bms_current_amps" in keys + assert "bms_manufacture_date" in keys + assert "bms_temperature_celcius" in keys + + def test_items_yields_correct_types(self, populated_jbdbms): + d = dict(populated_jbdbms.items()) + assert isinstance(d["bms_voltage_total_volts"], BMSScalarField) + assert isinstance(d["bms_manufacture_date"], BMSInfoField) + assert isinstance(d["bms_temperature_celcius"], BMSMultiField) + + def 
test_is_ups_subclass(self): + assert isinstance(JBDBMS(), UPS) + + +# --------------------------------------------------------------------------- +# parse_03_response +# --------------------------------------------------------------------------- + +class TestParse03Response: + def test_valid_response_returns_jbdbms(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert isinstance(result, JBDBMS) + + def test_voltage(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_voltage_total_volts.raw_value == pytest.approx(52.00) + assert result.bms_voltage_total_volts.units == "V" + + def test_current(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_current_amps.raw_value == pytest.approx(0.0) + + def test_remaining_capacity(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_capacity_remaining_ah.raw_value == pytest.approx(100.00) + + def test_nominal_capacity(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_capacity_nominal_ah.raw_value == pytest.approx(100.00) + + def test_charge_cycles(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_charge_cycles.raw_value == 10 + + def test_manufacture_date(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_manufacture_date.info == "2023-01-15" + + def test_rsoc(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_capacity_charge_ratio.raw_value == pytest.approx(0.95) + + def test_cell_count(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_cell_number.raw_value == 4 + + def test_temperature(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_temperature_celcius.raw_values[1] == pytest.approx(25.0) + assert 
result.bms_temperature_celcius.units == "°C" + + def test_mosfet_charging(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_charge_is_charging.raw_value is True + + def test_mosfet_discharging(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_charge_is_discharging.raw_value is True + + def test_no_protection_faults(self, valid_03_response): + result = parse_03_response(valid_03_response) + assert result.bms_protection_sop_bool.raw_value is False + assert result.bms_protection_cocp_bool.raw_value is False + + def test_wrong_start_byte_returns_false(self, valid_03_response): + valid_03_response[0] = 0xAA + assert parse_03_response(valid_03_response) is False + + def test_error_status_byte_returns_false(self, valid_03_response): + valid_03_response[2] = 0x80 + assert parse_03_response(valid_03_response) is False + + def test_bad_checksum_returns_false(self, valid_03_response): + valid_03_response[-1] ^= 0xFF # corrupt last checksum byte + assert parse_03_response(valid_03_response) is False + + def test_truncated_response_returns_false(self, valid_03_response): + assert parse_03_response(valid_03_response[:10]) is False + + def test_zero_data_len_returns_false(self, valid_03_response): + valid_03_response[3] = 0x00 + assert parse_03_response(valid_03_response) is False + + +# --------------------------------------------------------------------------- +# parse_04_response +# --------------------------------------------------------------------------- + +class TestParse04Response: + def test_valid_response_returns_multi_field(self, valid_04_response): + result = parse_04_response(valid_04_response) + assert isinstance(result, BMSMultiField) + + def test_cell_count(self, valid_04_response): + result = parse_04_response(valid_04_response) + assert len(result.raw_values) == 4 + + def test_cell_voltages(self, valid_04_response): + result = parse_04_response(valid_04_response) + assert 
result.raw_values[1] == pytest.approx(3.600) + assert result.raw_values[2] == pytest.approx(3.601) + assert result.raw_values[3] == pytest.approx(3.599) + assert result.raw_values[4] == pytest.approx(3.598) + + def test_cell_voltage_units(self, valid_04_response): + result = parse_04_response(valid_04_response) + assert result.units == "V" + assert result.label == "cell" + + def test_wrong_start_byte_returns_false(self, valid_04_response): + valid_04_response[0] = 0xAA + assert parse_04_response(valid_04_response) is False + + def test_error_status_byte_returns_false(self, valid_04_response): + valid_04_response[2] = 0x80 + assert parse_04_response(valid_04_response) is False + + def test_bad_checksum_returns_false(self, valid_04_response): + valid_04_response[-1] ^= 0xFF + assert parse_04_response(valid_04_response) is False + + def test_truncated_response_returns_false(self, valid_04_response): + assert parse_04_response(valid_04_response[:5]) is False + + def test_zero_data_len_returns_false(self, valid_04_response): + valid_04_response[3] = 0x00 + assert parse_04_response(valid_04_response) is False + + +# --------------------------------------------------------------------------- +# parse_03_response — protection bits and other field variations +# --------------------------------------------------------------------------- + +def _recompute_checksum(response: bytearray) -> None: + """Recompute and update the JBD frame checksum in-place.""" + data_len = response[3] + first = data_len + 4 + s = sum(response[3:first]) + s = (s ^ 0xFFFF) + 1 + response[first] = (s >> 8) & 0xFF + response[first + 1] = s & 0xFF + + +class TestParse03ProtectionBits: + def test_sop_bit_set(self, valid_03_response): + valid_03_response[20] = 0x00 + valid_03_response[21] = 0x01 # bit 0 = SOP + _recompute_checksum(valid_03_response) + result = parse_03_response(valid_03_response) + assert result.bms_protection_sop_bool.raw_value is True + assert result.bms_protection_sup_bool.raw_value is 
False + + def test_cocp_bit_set(self, valid_03_response): + valid_03_response[20] = 0x01 # bit 8 = COCP (high byte bit 0) + valid_03_response[21] = 0x00 + _recompute_checksum(valid_03_response) + result = parse_03_response(valid_03_response) + assert result.bms_protection_cocp_bool.raw_value is True + + def test_all_protections_clear(self, valid_03_response): + result = parse_03_response(valid_03_response) + for attr in [ + "bms_protection_sop_bool", "bms_protection_sup_bool", + "bms_protection_wgop_bool", "bms_protection_wgup_bool", + "bms_protection_cotp_bool", "bms_protection_cutp_bool", + "bms_protection_dotp_bool", "bms_protection_dutp_bool", + "bms_protection_cocp_bool", "bms_protection_docp_bool", + "bms_protection_scp_bool", "bms_protection_fdic_bool", + "bms_protection_slmos_bool", + ]: + assert getattr(result, attr).raw_value is False, f"{attr} should be False" + + def test_negative_current(self, valid_03_response): + # 1536 = 0x0600; convert_to_signed(1536) = -512; -512 * 0.01 = -5.12 A + valid_03_response[6] = 0x06 + valid_03_response[7] = 0x00 + _recompute_checksum(valid_03_response) + result = parse_03_response(valid_03_response) + assert result.bms_current_amps.raw_value == pytest.approx(-5.12) + + def test_cell_1_balancing(self, valid_03_response): + # balance_state_low = bytes_to_digits(response[18], response[19]) + # bit 0 of balance_state_low → cell 1 balancing + valid_03_response[18] = 0x00 + valid_03_response[19] = 0x01 + _recompute_checksum(valid_03_response) + result = parse_03_response(valid_03_response) + assert result.bms_cells_balancing.raw_values[1] is True + assert result.bms_cells_balancing.raw_values[2] is False + + def test_mosfet_only_charging(self, valid_03_response): + # control_status = 0x01 → charging only + valid_03_response[24] = 0x01 + _recompute_checksum(valid_03_response) + result = parse_03_response(valid_03_response) + assert result.bms_charge_is_charging.raw_value is True + assert 
result.bms_charge_is_discharging.raw_value is False + + def test_mosfet_only_discharging(self, valid_03_response): + # control_status = 0x02 → discharging only + valid_03_response[24] = 0x02 + _recompute_checksum(valid_03_response) + result = parse_03_response(valid_03_response) + assert result.bms_charge_is_charging.raw_value is False + assert result.bms_charge_is_discharging.raw_value is True + + def test_two_temperature_sensors(self): + from tests.conftest import VALID_03_RESPONSE + # Build a modified 03 response with 2 NTC sensors + # data_len changes from 25 to 27 (add 2 bytes for NTC 2) + response = bytearray(VALID_03_RESPONSE[:29]) # bytes 0-28 + response[3] = 0x1B # data_len = 27 + response[26] = 0x02 # NTC count = 2 + response += bytearray([0x0B, 0x6B]) # NTC 2: (2923-2731)*0.1 = 19.2°C + response += bytearray([0x00, 0x00]) # placeholder checksum + _recompute_checksum(response) + result = parse_03_response(response) + assert isinstance(result, JBDBMS) + assert len(result.bms_temperature_celcius.raw_values) == 2 + assert result.bms_temperature_celcius.raw_values[1] == pytest.approx(25.0) + assert result.bms_temperature_celcius.raw_values[2] == pytest.approx(19.2) + + +# --------------------------------------------------------------------------- +# serial_cleanup +# --------------------------------------------------------------------------- + +class TestSerialCleanup: + def test_closes_open_port(self): + ser = MagicMock() + ser.is_open = True + serial_cleanup(ser) + ser.reset_input_buffer.assert_called() + ser.reset_output_buffer.assert_called() + ser.close.assert_called_once() + + def test_does_not_close_if_not_open(self): + ser = MagicMock() + ser.is_open = False + serial_cleanup(ser) + ser.close.assert_not_called() + + def test_debug_3_logs_message(self, capsys): + ser = MagicMock() + ser.is_open = True + serial_cleanup(ser, debug=3) + captured = capsys.readouterr() + assert "cleaning up" in captured.out + + +# 
--------------------------------------------------------------------------- +# requestMessage +# --------------------------------------------------------------------------- + +class TestRequestMessage: + def _make_serial(self, response_bytes=b"\x77"): + ser = MagicMock() + ser.is_open = True + ser.in_waiting = 1 + ser.write.return_value = 7 + ser.read_until.return_value = response_bytes + return ser + + def test_returns_response_bytes(self): + payload = b"\xDD\xA5\x00\x04\x01\x02\x03\x04\x77" + ser = self._make_serial(payload) + reqmsg = bytearray([0xDD, 0xA5, 0x03, 0x00, 0xFF, 0xFD, 0x77]) + result = requestMessage(ser, reqmsg) + assert result == payload + + def test_open_failure_returns_false(self): + ser = MagicMock() + ser.is_open = True + ser.open.side_effect = Exception("port not found") + result = requestMessage(ser, bytearray([0xDD, 0xA5, 0x03, 0x00, 0xFF, 0xFD, 0x77])) + assert result is False + + def test_short_write_returns_false(self): + ser = self._make_serial() + ser.write.return_value = 3 # fewer bytes than message length + result = requestMessage(ser, bytearray([0xDD, 0xA5, 0x03, 0x00, 0xFF, 0xFD, 0x77])) + assert result is False + + def test_empty_response_returns_empty_string(self): + ser = self._make_serial(b"") + result = requestMessage(ser, bytearray([0xDD, 0xA5, 0x03, 0x00, 0xFF, 0xFD, 0x77])) + assert result == "" + + def test_exception_during_read_logs_error(self, capsys): + """When read_until raises, requestMessage logs the exception.""" + ser = MagicMock() + ser.is_open = True + ser.in_waiting = 1 + ser.write.return_value = 7 + ser.read_until.side_effect = Exception("serial port error") + result = requestMessage(ser, bytearray([0xDD, 0xA5, 0x03, 0x00, 0xFF, 0xFD, 0x77])) + assert result is None + captured = capsys.readouterr() + assert "error communicating" in captured.out.lower() + + def test_debug_3_logs_startup(self, capsys): + payload = b"\xDD\xA5\x00\x04\x01\x02\x03\x04\x77" + ser = self._make_serial(payload) + requestMessage(ser, 
bytearray([0xDD, 0xA5, 0x03, 0x00, 0xFF, 0xFD, 0x77]), debug=3) + captured = capsys.readouterr() + assert "starting up monitor" in captured.out + + def test_wait_timeout_returns_empty_string(self): + """When in_waiting stays 0 long enough, returns empty string.""" + ser = MagicMock() + ser.is_open = True + ser.in_waiting = 0 + ser.write.return_value = 7 + call_count = 0 + + def _in_waiting_prop(): + nonlocal call_count + call_count += 1 + return 0 + + # Simulate in_waiting always 0 → timeout after wait_time > 2 + ser_mock = MagicMock() + ser_mock.is_open = True + ser_mock.write.return_value = 7 + + # Make in_waiting always return 0 (property mock) + type(ser_mock).in_waiting = property(lambda self: 0) + + with patch("bmspy.jbd_bms.time.sleep"): + result = requestMessage( + ser_mock, + bytearray([0xDD, 0xA5, 0x03, 0x00, 0xFF, 0xFD, 0x77]), + debug=3, + ) + assert result == "" + + def test_cannot_open_port_returns_none(self): + """When ser.is_open is False after open() call, returns None.""" + ser = MagicMock() + ser.is_open = False + # open() doesn't raise but port remains closed + ser.open.return_value = None + result = requestMessage(ser, bytearray([0xDD, 0xA5, 0x03, 0x00, 0xFF, 0xFD, 0x77])) + assert result is None + + +# --------------------------------------------------------------------------- +# collect_data +# --------------------------------------------------------------------------- + +class TestCollectData: + def test_successful_collect_returns_jbdbms(self, valid_03_response, valid_04_response): + responses = [bytes(valid_03_response), bytes(valid_04_response)] + idx = 0 + def _req(ser, msg, debug=0): + nonlocal idx + r = responses[idx]; idx += 1; return r + with patch("bmspy.jbd_bms.requestMessage", side_effect=_req): + result = collect_data(MagicMock()) + assert isinstance(result, JBDBMS) + assert result.bms_voltage_cells_volts is not None + + def test_empty_03_response_returns_false(self): + with patch("bmspy.jbd_bms.requestMessage", return_value=b""): 
+ result = collect_data(MagicMock()) + assert result is False + + def test_empty_04_response_returns_false(self, valid_03_response): + responses = [bytes(valid_03_response), b""] + idx = 0 + def _req(ser, msg, debug=0): + nonlocal idx + r = responses[idx]; idx += 1; return r + with patch("bmspy.jbd_bms.requestMessage", side_effect=_req): + result = collect_data(MagicMock()) + assert result is False + + def test_bad_03_checksum_returns_false(self, valid_03_response, valid_04_response): + valid_03_response[-1] ^= 0xFF # corrupt checksum + responses = [bytes(valid_03_response), bytes(valid_04_response)] + idx = 0 + def _req(ser, msg, debug=0): + nonlocal idx + r = responses[idx]; idx += 1; return r + with patch("bmspy.jbd_bms.requestMessage", side_effect=_req): + result = collect_data(MagicMock()) + assert result is False + + def test_bad_04_checksum_returns_false(self, valid_03_response, valid_04_response): + valid_04_response[-1] ^= 0xFF # corrupt checksum + responses = [bytes(valid_03_response), bytes(valid_04_response)] + idx = 0 + def _req(ser, msg, debug=0): + nonlocal idx + r = responses[idx]; idx += 1; return r + with patch("bmspy.jbd_bms.requestMessage", side_effect=_req): + result = collect_data(MagicMock()) + assert result is False + + def test_collect_data_debug_1(self, valid_03_response, valid_04_response, capsys): + responses = [bytes(valid_03_response), bytes(valid_04_response)] + idx = 0 + def _req(ser, msg, debug=0): + nonlocal idx + r = responses[idx]; idx += 1; return r + with patch("bmspy.jbd_bms.requestMessage", side_effect=_req): + result = collect_data(MagicMock(), debug=1) + assert isinstance(result, JBDBMS) + + +# --------------------------------------------------------------------------- +# parse_03 and parse_04 debug coverage +# --------------------------------------------------------------------------- + +class TestParse03Debug: + def test_debug_2_logs_voltage(self, valid_03_response, capsys): + parse_03_response(valid_03_response, debug=2) 
+ captured = capsys.readouterr() + assert "voltage" in captured.out.lower() or "52" in captured.out + + def test_debug_3_logs_data_length(self, valid_03_response, capsys): + parse_03_response(valid_03_response, debug=3) + captured = capsys.readouterr() + assert "data length" in captured.out.lower() or "25" in captured.out + + def test_debug_3_logs_protection_state(self, valid_03_response, capsys): + parse_03_response(valid_03_response, debug=3) + captured = capsys.readouterr() + assert "protection state" in captured.out.lower() or "sop" in captured.out.lower() + + +class TestParse04Debug: + def test_debug_2_logs_cell_voltage(self, valid_04_response, capsys): + parse_04_response(valid_04_response, debug=2) + captured = capsys.readouterr() + assert "cell" in captured.out.lower() or "3.6" in captured.out + + def test_debug_3_logs_data_length(self, valid_04_response, capsys): + parse_04_response(valid_04_response, debug=3) + captured = capsys.readouterr() + assert "data length" in captured.out.lower() or "8" in captured.out + + +# --------------------------------------------------------------------------- +# parse_03_response — cells >= 16 (branch coverage) +# --------------------------------------------------------------------------- + +class TestCalculateChecksum: + def test_returns_empty_string(self): + from bmspy.jbd_bms import calculate_checksum + result = calculate_checksum(b"\x01\x02\x03") + assert result == "" + + +class TestParse03DataLenZero: + def test_data_len_zero_with_valid_checksum_returns_false(self): + """Build a response where data_len=0 and checksum is valid.""" + response = bytearray([ + 0xDD, 0xA5, 0x00, 0x00, # data_len = 0 + 0x00, 0x00, # checksum positions (first=4, second=5) + 0x77, # end + ]) + _recompute_checksum(response) + result = parse_03_response(response) + assert result is False + + +class TestParse04DataLenZero: + def test_data_len_zero_with_valid_checksum_returns_false(self): + """Build a parse_04 response where data_len=0 and 
checksum is valid.""" + response = bytearray([ + 0xDD, 0xA5, 0x00, 0x00, # data_len = 0 + 0x00, 0x00, # checksum positions + 0x77, # end + ]) + _recompute_checksum(response) + result = parse_04_response(response) + assert result is False + + +class TestParse03HighCellCount: + def test_17_cells_uses_high_balance_state(self): + """Build a 17-cell response to cover the cell >= 16 branch.""" + from tests.conftest import VALID_03_RESPONSE + # We need data_len = 25 + (17-4)*2 = 25 for 4 NTCs... actually we just + # need 17 cells. data_len stays 25 but we set cell count to 17. + response = bytearray(VALID_03_RESPONSE) + # Set cell count to 17 + response[25] = 17 + # Recompute checksum + _recompute_checksum(response) + result = parse_03_response(response) + assert isinstance(result, JBDBMS) + assert result.bms_cell_number.raw_value == 17 + # Cell 17 should be in bms_cells_balancing + assert 17 in result.bms_cells_balancing.raw_values + + +# --------------------------------------------------------------------------- +# collect_data debug paths +# --------------------------------------------------------------------------- + +class TestCollectDataDebug: + def test_debug_1_empty_03_logs(self, capsys): + with patch("bmspy.jbd_bms.requestMessage", return_value=b""): + collect_data(MagicMock(), debug=1) + captured = capsys.readouterr() + assert "error" in captured.out.lower() + + def test_debug_1_empty_04_logs(self, valid_03_response, capsys): + responses = [bytes(valid_03_response), b""] + idx = 0 + def _req(ser, msg, debug=0): + nonlocal idx + r = responses[idx]; idx += 1; return r + with patch("bmspy.jbd_bms.requestMessage", side_effect=_req): + collect_data(MagicMock(), debug=1) + captured = capsys.readouterr() + assert "error" in captured.out.lower() + + +# --------------------------------------------------------------------------- +# initialise_serial — covered with mocking +# --------------------------------------------------------------------------- + +class 
TestInitialiseSerial: + def test_returns_serial_object(self): + from bmspy.jbd_bms import initialise_serial + with patch("bmspy.jbd_bms.serial.Serial") as mock_serial_cls: + mock_ser = MagicMock() + mock_serial_cls.return_value = mock_ser + result = initialise_serial("/dev/ttyUSB0", debug=0) + assert result is mock_ser + + def test_sets_serial_params(self): + from bmspy.jbd_bms import initialise_serial + import serial as _serial + with patch("bmspy.jbd_bms.serial.Serial") as mock_serial_cls: + mock_ser = MagicMock() + mock_serial_cls.return_value = mock_ser + initialise_serial("/dev/ttyUSB0") + # Verify parity was set + assert mock_ser.parity == _serial.PARITY_NONE diff --git a/tests/test_prometheus.py b/tests/test_prometheus.py new file mode 100644 index 0000000..085d901 --- /dev/null +++ b/tests/test_prometheus.py @@ -0,0 +1,277 @@ +import pytest +pytest.importorskip("prometheus_client", reason="prometheus-client not installed") + +import os +import tempfile +from unittest.mock import patch, MagicMock + +from prometheus_client import CollectorRegistry + +from bmspy.classes import UPS, BMSScalarField, BMSMultiField, BMSInfoField +from bmspy.prometheus import ( + prometheus_create_metric, + prometheus_populate_metric, + prometheus_export, +) + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def _scalar_ups(**fields): + """Build a UPS with one scalar field per kwarg (name -> raw_value).""" + field_dicts = { + name: {"help": name, "raw_value": val, "value": str(val), "units": "V"} + for name, val in fields.items() + } + return UPS.from_dict(field_dicts) + + +def _make_ups_data(): + """Build a dict[str, UPS] with scalar, multi, and info fields.""" + return { + "testups": UPS.from_dict({ + "bms_voltage": { + "help": "Total Voltage", + "raw_value": 52.0, + "value": "52.00", + "units": "V", + }, + "bms_cells": { + "help": "Cell Voltages", + 
"label": "cell", + "raw_values": {1: 3.6, 2: 3.61}, + "values": {1: "3.600", 2: "3.610"}, + "units": "V", + }, + "bms_date": { + "help": "Manufacture Date", + "info": "2023-01-15", + }, + }) + } + + +# --------------------------------------------------------------------------- +# prometheus_create_metric +# --------------------------------------------------------------------------- + +class TestPrometheusCreateMetric: + def test_scalar_creates_gauge(self): + registry = CollectorRegistry(auto_describe=True) + ups_data = _scalar_ups(bms_voltage=52.0) + metric = prometheus_create_metric(registry, {"myups": ups_data}) + from prometheus_client import Gauge + assert isinstance(metric["bms_voltage"], Gauge) + + def test_multi_creates_gauge_with_label(self): + registry = CollectorRegistry(auto_describe=True) + ups_data = {"myups": UPS.from_dict({ + "bms_cells": { + "help": "Cells", + "label": "cell", + "raw_values": {1: 3.6}, + "values": {1: "3.600"}, + "units": "V", + }, + })} + metric = prometheus_create_metric(registry, ups_data) + from prometheus_client import Gauge + assert isinstance(metric["bms_cells"], Gauge) + + def test_info_creates_info_metric(self): + registry = CollectorRegistry(auto_describe=True) + ups_data = {"myups": UPS.from_dict({ + "bms_date": {"help": "Date", "info": "2023-01-15"}, + })} + metric = prometheus_create_metric(registry, ups_data) + from prometheus_client import Info + assert isinstance(metric["bms_date"], Info) + + def test_skips_duplicate_across_ups_devices(self): + registry = CollectorRegistry(auto_describe=True) + field = {"help": "V", "raw_value": 52.0, "value": "52.00", "units": "V"} + ups_data = { + "ups1": UPS.from_dict({"bms_voltage": dict(field)}), + "ups2": UPS.from_dict({"bms_voltage": dict(field)}), + } + metric = prometheus_create_metric(registry, ups_data) + # Should only have one entry, not raise on duplicate registration + assert "bms_voltage" in metric + + def test_all_field_types_in_one_call(self): + registry = 
CollectorRegistry(auto_describe=True) + metric = prometheus_create_metric(registry, _make_ups_data()) + assert "bms_voltage" in metric + assert "bms_cells" in metric + assert "bms_date" in metric + + def test_empty_ups_data_returns_empty_dict(self): + registry = CollectorRegistry(auto_describe=True) + metric = prometheus_create_metric(registry, {"myups": UPS.from_dict({})}) + assert metric == {} + + +# --------------------------------------------------------------------------- +# prometheus_populate_metric +# --------------------------------------------------------------------------- + +class TestPrometheusPopulateMetric: + def test_scalar_value_set(self): + registry = CollectorRegistry(auto_describe=True) + ups_data = {"testups": _scalar_ups(bms_voltage=52.0)} + metric = prometheus_create_metric(registry, ups_data) + prometheus_populate_metric(metric, ups_data) + # Verify via registry output + from prometheus_client import generate_latest + output = generate_latest(registry).decode() + assert "52.0" in output + + def test_multi_values_set(self): + registry = CollectorRegistry(auto_describe=True) + ups_data = {"testups": UPS.from_dict({ + "bms_cells": { + "help": "Cells", + "label": "cell", + "raw_values": {1: 3.6, 2: 3.61}, + "values": {1: "3.600", 2: "3.610"}, + "units": "V", + }, + })} + metric = prometheus_create_metric(registry, ups_data) + prometheus_populate_metric(metric, ups_data) + from prometheus_client import generate_latest + output = generate_latest(registry).decode() + assert "3.6" in output + + def test_info_value_set(self): + registry = CollectorRegistry(auto_describe=True) + ups_data = {"testups": UPS.from_dict({ + "bms_date": {"help": "Date", "info": "2023-01-15"}, + })} + metric = prometheus_create_metric(registry, ups_data) + prometheus_populate_metric(metric, ups_data) + from prometheus_client import generate_latest + output = generate_latest(registry).decode() + assert "2023-01-15" in output + + def test_missing_metric_key_is_skipped(self): + 
"""populate should not crash if a field name is not in metric dict.""" + registry = CollectorRegistry(auto_describe=True) + ups_data = {"testups": _scalar_ups(bms_voltage=52.0)} + # Create metrics for a different field name + other_metric = {} + prometheus_populate_metric(other_metric, ups_data) # should not raise + + +# --------------------------------------------------------------------------- +# prometheus_export daemonize=True (start_http_server path) +# --------------------------------------------------------------------------- + +class TestPrometheusExportDaemonize: + def test_daemonize_calls_start_http_server(self): + """When daemonize=True, start_http_server is called and loop runs once then exits.""" + ups_data = {"testups": _scalar_ups(bms_voltage=52.0)} + call_count = 0 + + def _read_data(*args, **kwargs): + nonlocal call_count + call_count += 1 + return ups_data + + # Make time.sleep raise StopIteration after first call to exit the daemon loop + with patch("bmspy.prometheus.client.read_data", side_effect=_read_data), \ + patch("bmspy.prometheus.prometheus_client.start_http_server") as mock_start, \ + patch("bmspy.prometheus.time.sleep", side_effect=StopIteration("stop")): + with pytest.raises(StopIteration): + prometheus_export(daemonize=True) + mock_start.assert_called_once() + + +# --------------------------------------------------------------------------- +# prometheus_export +# --------------------------------------------------------------------------- + +class TestPrometheusExport: + def test_export_to_textfile(self, tmp_path): + filename = str(tmp_path / "metrics.prom") + ups_data = {"testups": _scalar_ups(bms_voltage=52.0)} + with patch("bmspy.prometheus.client.read_data", return_value=ups_data): + result = prometheus_export(daemonize=False, filename=filename) + assert result is True + assert os.path.exists(filename) + + def test_export_no_filename_returns_false(self): + ups_data = {"testups": _scalar_ups(bms_voltage=52.0)} + with 
patch("bmspy.prometheus.client.read_data", return_value=ups_data): + result = prometheus_export(daemonize=False, filename=None) + assert result is False + + def test_export_waits_for_data(self, tmp_path): + """When read_data returns empty first, then real data, export should work.""" + filename = str(tmp_path / "metrics.prom") + ups_data = {"testups": _scalar_ups(bms_voltage=52.0)} + call_count = 0 + + def _read_data(*args, **kwargs): + nonlocal call_count + call_count += 1 + if call_count < 2: + return {} + return ups_data + + with patch("bmspy.prometheus.client.read_data", side_effect=_read_data), \ + patch("bmspy.prometheus.time.sleep"): + result = prometheus_export(daemonize=False, filename=filename) + assert result is True + assert call_count == 2 + + def test_export_with_all_field_types(self, tmp_path): + filename = str(tmp_path / "metrics.prom") + ups_data = _make_ups_data() + with patch("bmspy.prometheus.client.read_data", return_value=ups_data): + result = prometheus_export(daemonize=False, filename=filename) + assert result is True + content = open(filename).read() + assert "bms_voltage" in content + assert "bms_cells" in content + assert "bms_date" in content + + +# --------------------------------------------------------------------------- +# prometheus main() +# --------------------------------------------------------------------------- + +class TestPrometheusMain: + def _ups_dict(self): + return {"testups": _scalar_ups(bms_voltage=52.0)} + + def test_main_with_file_arg(self, tmp_path): + from bmspy.prometheus import main + filename = str(tmp_path / "metrics.prom") + with patch("sys.argv", ["bmspy-prometheus", "--file", filename]), \ + patch("bmspy.prometheus.client.read_data", return_value=self._ups_dict()): + main() + assert os.path.exists(filename) + + def test_main_with_socket_arg(self, tmp_path): + from bmspy.prometheus import main + filename = str(tmp_path / "metrics.prom") + with patch("sys.argv", ["bmspy-prometheus", "--file", filename, 
"--socket", "/tmp/test.sock"]), \ + patch("bmspy.prometheus.client.read_data", return_value=self._ups_dict()): + main() + + def test_main_with_ups_arg(self, tmp_path): + from bmspy.prometheus import main + filename = str(tmp_path / "metrics.prom") + with patch("sys.argv", ["bmspy-prometheus", "--file", filename, "--ups", "myups"]), \ + patch("bmspy.prometheus.client.read_data", return_value=self._ups_dict()): + main() + + def test_main_with_verbose(self, tmp_path): + from bmspy.prometheus import main + filename = str(tmp_path / "metrics.prom") + with patch("sys.argv", ["bmspy-prometheus", "--file", filename, "-v"]), \ + patch("bmspy.prometheus.client.read_data", return_value=self._ups_dict()): + main() diff --git a/tests/test_server.py b/tests/test_server.py new file mode 100644 index 0000000..0f9acf7 --- /dev/null +++ b/tests/test_server.py @@ -0,0 +1,957 @@ +import json +import socket as socket_module +import struct +import threading + +import pytest +import serial + +from bmspy.classes import BMSScalarField, BMSInfoField, UPS +from bmspy.jbd_bms import JBDBMS +from bmspy.server import DeviceState, parse_device, read_request, send_response + + +# --------------------------------------------------------------------------- +# parse_device +# --------------------------------------------------------------------------- + +class TestParseDevice: + def test_plain_path(self): + assert parse_device("/dev/ttyUSB0") == ("ttyUSB0", "/dev/ttyUSB0") + + def test_named_path(self): + assert parse_device("myups:/dev/ttyUSB1") == ("myups", "/dev/ttyUSB1") + + def test_nested_path(self): + assert parse_device("/dev/serial/by-id/usb-FTDI") == ( + "usb-FTDI", + "/dev/serial/by-id/usb-FTDI", + ) + + def test_name_without_slash(self): + # No "/" prefix, no ":" → treated as a plain path; last segment is name + assert parse_device("ttyUSB0") == ("ttyUSB0", "ttyUSB0") + + def test_name_colon_path_no_leading_slash(self): + assert parse_device("office:/dev/ttyUSB2") == ("office", 
"/dev/ttyUSB2") + + +# --------------------------------------------------------------------------- +# DeviceState +# --------------------------------------------------------------------------- + +class TestDeviceState: + def test_defaults(self): + ser = serial.Serial() + ds = DeviceState(ser=ser) + assert ds.data is None + assert ds.timestamp == 0.0 + assert ds.ser is ser + + def test_fields_are_mutable(self): + ser = serial.Serial() + ds = DeviceState(ser=ser) + ds.timestamp = 123.4 + ds.data = UPS.from_dict({}) + assert ds.timestamp == 123.4 + assert isinstance(ds.data, UPS) + + +# --------------------------------------------------------------------------- +# read_request / send_response round-trip +# --------------------------------------------------------------------------- + +def _send_framed(sock: socket_module.socket, data: dict) -> None: + payload = json.dumps(data).encode() + sock.sendall(struct.pack("!I", len(payload)) + payload) + + +def _recv_framed(sock: socket_module.socket) -> dict: + length = struct.unpack("!I", sock.recv(4))[0] + return json.loads(sock.recv(length)) + + +class TestReadRequest: + def test_round_trip(self): + srv, cli = socket_module.socketpair() + try: + _send_framed(cli, {"command": "GET", "client": "test"}) + result = read_request(srv) + assert result == {"command": "GET", "client": "test"} + finally: + srv.close() + cli.close() + + def test_with_ups_filter(self): + srv, cli = socket_module.socketpair() + try: + _send_framed(cli, {"command": "GET", "client": "test", "ups": "myups"}) + result = read_request(srv) + assert result["ups"] == "myups" + finally: + srv.close() + cli.close() + + +class TestSendResponse: + def test_plain_dict(self): + srv, cli = socket_module.socketpair() + try: + send_response(srv, {"status": "REGISTERED", "client": "test"}, "test") + result = _recv_framed(cli) + assert result == {"status": "REGISTERED", "client": "test"} + finally: + srv.close() + cli.close() + + def 
test_ups_object_serialized_via_items(self): + """UPS objects must be serialized using items(), not dataclass_asdict.""" + bms = JBDBMS() + bms.bms_voltage_total_volts = BMSScalarField( + help="Total Voltage", raw_value=52.0, value="52.00", units="V" + ) + bms.bms_manufacture_date = BMSInfoField( + help="Date of Manufacture", info="2023-01-15" + ) + + srv, cli = socket_module.socketpair() + try: + send_response(srv, {"myups": bms}, "test") + result = _recv_framed(cli) + finally: + srv.close() + cli.close() + + assert "myups" in result + assert "bms_voltage_total_volts" in result["myups"] + assert result["myups"]["bms_voltage_total_volts"]["raw_value"] == 52.0 + # None fields must not appear + assert "bms_current_amps" not in result["myups"] + # client field must not appear + assert "client" not in result["myups"] + + def test_empty_ups_serializes_to_empty_dict(self): + srv, cli = socket_module.socketpair() + try: + send_response(srv, {"myups": JBDBMS()}, "test") + result = _recv_framed(cli) + finally: + srv.close() + cli.close() + assert result["myups"] == {} + + def test_plain_dict_passthrough(self): + srv, cli = socket_module.socketpair() + try: + send_response(srv, {"key": "value", "number": 42}, "test") + result = _recv_framed(cli) + finally: + srv.close() + cli.close() + assert result == {"key": "value", "number": 42} + + def test_closed_socket_raises_os_error(self): + srv, cli = socket_module.socketpair() + srv.close() + cli.close() + with pytest.raises(OSError): + send_response(srv, {"status": "OK"}, "test") + + +# --------------------------------------------------------------------------- +# signalHandler +# --------------------------------------------------------------------------- + +class TestSignalHandler: + def test_raises_system_exit(self): + from bmspy.server import signalHandler + with pytest.raises(SystemExit): + signalHandler() + + def test_exit_message_contains_terminating(self): + from bmspy.server import signalHandler + with 
pytest.raises(SystemExit, match="terminating"): + signalHandler() + + +# --------------------------------------------------------------------------- +# socket_cleanup +# --------------------------------------------------------------------------- + +class TestSocketCleanup: + def test_removes_socket_file(self, tmp_path): + from bmspy.server import socket_cleanup + sock_file = tmp_path / "test.sock" + sock_file.touch() + assert sock_file.exists() + socket_cleanup(str(sock_file)) + assert not sock_file.exists() + + def test_raises_when_file_missing(self, tmp_path): + from bmspy.server import socket_cleanup + with pytest.raises(FileNotFoundError): + socket_cleanup(str(tmp_path / "nonexistent.sock")) + + +# --------------------------------------------------------------------------- +# read_request — error paths +# --------------------------------------------------------------------------- + +class TestReadRequestErrors: + def test_invalid_json_raises_exception(self): + srv, cli = socket_module.socketpair() + try: + invalid_payload = b"not valid json !!!" 
+ cli.sendall(struct.pack("!I", len(invalid_payload)) + invalid_payload) + with pytest.raises(Exception, match="unable to read incoming request"): + read_request(srv) + finally: + srv.close() + cli.close() + + def test_truncated_length_bytes_raises(self): + srv, cli = socket_module.socketpair() + try: + cli.sendall(b"\x00\x00") # only 2 of 4 length bytes, then close + cli.close() + with pytest.raises(Exception): + read_request(srv) + finally: + srv.close() + + def test_recv_raises_os_error(self): + """When recv raises on first read, read_request should raise OSError.""" + from unittest.mock import MagicMock + mock_conn = MagicMock() + mock_conn.recv.side_effect = OSError("connection reset") + with pytest.raises(OSError, match="unable to read request length"): + read_request(mock_conn) + + def test_recv_body_raises_os_error(self): + """When recv raises on second read (body), should raise OSError.""" + from unittest.mock import MagicMock + import struct + mock_conn = MagicMock() + length_bytes = struct.pack("!I", 10) + # First recv returns valid length bytes, second recv raises + mock_conn.recv.side_effect = [length_bytes, OSError("body read error")] + with pytest.raises(OSError, match="unable to read socket"): + read_request(mock_conn) + + def test_debug_5_logs_length(self, capsys): + srv, cli = socket_module.socketpair() + try: + _send_framed(cli, {"command": "GET", "client": "test"}) + read_request(srv, debug=5) + finally: + srv.close() + cli.close() + # debug > 4 logs incoming length + captured = capsys.readouterr() + assert "incoming length" in captured.out + + def test_debug_4_logs_request_bytes(self, capsys): + srv, cli = socket_module.socketpair() + try: + _send_framed(cli, {"command": "GET", "client": "test"}) + read_request(srv, debug=4) + finally: + srv.close() + cli.close() + captured = capsys.readouterr() + assert "incoming request" in captured.out + + def test_debug_3_logs_received(self, capsys): + srv, cli = socket_module.socketpair() + try: + 
_send_framed(cli, {"command": "GET", "client": "test"}) + read_request(srv, debug=3) + finally: + srv.close() + cli.close() + captured = capsys.readouterr() + assert "received" in captured.out + + +class TestServerMain: + """Test the server main() function by running it in a thread and sending real socket commands.""" + + def _make_server_thread(self, sock_path: str, ready_event: threading.Event, + stop_event: threading.Event, **kwargs): + """Run server main() in a thread with mocked serial and collect_data.""" + import socket as _socket + from unittest.mock import MagicMock, patch + from bmspy.server import main as server_main + from bmspy.classes import BMSScalarField + from bmspy.jbd_bms import JBDBMS + + # Build a fake JBDBMS result + fake_bms = JBDBMS() + fake_bms.bms_voltage_total_volts = BMSScalarField( + help="Voltage", raw_value=52.0, value="52.00", units="V" + ) + + def _do_main(): + import sys + import time as _t + argv = ["bmspy-server", "--socket", sock_path, "--device", "/dev/ttyUSB0"] + if "debug" in kwargs: + argv += ["-v"] * kwargs["debug"] + + original_listen = socket_module.socket.listen + + def _patched_listen(self, backlog=1): + result = original_listen(self, backlog) + ready_event.set() + return result + + with patch("sys.argv", argv), \ + patch("bmspy.server.signal.signal"), \ + patch("bmspy.server.initialise_serial", return_value=MagicMock()), \ + patch("bmspy.server.collect_data", return_value=fake_bms), \ + patch("bmspy.server.time.sleep"), \ + patch("bmspy.server.os.path.isdir", return_value=True), \ + patch("bmspy.server.os.path.exists", return_value=False), \ + patch.object(socket_module.socket, "listen", _patched_listen): + try: + server_main() + except (SystemExit, KeyboardInterrupt, OSError): + pass + + t = threading.Thread(target=_do_main, daemon=True) + return t + + def _send_command(self, sock_path: str, cmd: dict) -> dict: + """Connect to server socket and send a command.""" + import socket as _socket + import struct + import 
json + + sock = _socket.socket(_socket.AF_UNIX, _socket.SOCK_STREAM) + # Wait for server to be ready + for _ in range(20): + try: + sock.connect(sock_path) + break + except (OSError, ConnectionRefusedError): + import time + time.sleep(0.1) + + payload = json.dumps(cmd).encode() + sock.sendall(struct.pack("!I", len(payload)) + payload) + + # Read response + raw_len = sock.recv(4) + if not raw_len: + return {} + length = struct.unpack("!I", raw_len)[0] + resp_data = sock.recv(length) + sock.close() + return json.loads(resp_data) + + def test_register_command(self, tmp_path): + """Test REGISTER command via server main().""" + sock_path = str(tmp_path / "server.sock") + ready = threading.Event() + stop = threading.Event() + + t = self._make_server_thread(sock_path, ready, stop) + t.start() + ready.wait(timeout=2) + import time as _time + _time.sleep(0.2) # Let server get to sock.accept() + + try: + response = self._send_command(sock_path, {"command": "REGISTER", "client": "test"}) + assert response.get("status") == "REGISTERED" + finally: + import os + # Trigger server shutdown by connecting and sending KeyboardInterrupt-triggering command + try: + import socket as _s, struct, json + s = _s.socket(_s.AF_UNIX, _s.SOCK_STREAM) + s.connect(sock_path) + # Send DEREGISTER + payload = json.dumps({"command": "DEREGISTER", "client": "test"}).encode() + s.sendall(struct.pack("!I", len(payload)) + payload) + s.recv(4) + s.close() + except Exception: + pass + t.join(timeout=0.5) + + def test_get_command(self, tmp_path): + """Test GET command via server main().""" + sock_path = str(tmp_path / "server_get.sock") + ready = threading.Event() + stop = threading.Event() + + t = self._make_server_thread(sock_path, ready, stop) + t.start() + ready.wait(timeout=2) + import time as _time + _time.sleep(0.2) + + try: + response = self._send_command(sock_path, {"command": "GET", "client": "test"}) + assert isinstance(response, dict) + finally: + t.join(timeout=0.5) + + def 
test_deregister_command(self, tmp_path): + """Test DEREGISTER command via server main().""" + sock_path = str(tmp_path / "server_dereg.sock") + ready = threading.Event() + stop = threading.Event() + + t = self._make_server_thread(sock_path, ready, stop) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + + try: + # First register + r1 = self._send_command(sock_path, {"command": "REGISTER", "client": "test"}) + assert r1.get("status") == "REGISTERED" + # Then deregister + r2 = self._send_command(sock_path, {"command": "DEREGISTER", "client": "test"}) + assert r2.get("status") == "DEREGISTERED" + finally: + t.join(timeout=0.5) + + def test_get_with_ups_filter(self, tmp_path): + """Test GET command with ups filter.""" + sock_path = str(tmp_path / "server_getups.sock") + ready = threading.Event() + stop = threading.Event() + + t = self._make_server_thread(sock_path, ready, stop) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + + try: + response = self._send_command(sock_path, {"command": "GET", "client": "test", "ups": "ttyUSB0"}) + # Should get ttyUSB0 data or empty (ups filter) + assert isinstance(response, dict) + finally: + t.join(timeout=0.5) + + def test_debug_mode_verbose(self, tmp_path, capsys): + """Test server main() with debug=1 logs messages.""" + sock_path = str(tmp_path / "server_dbg.sock") + ready = threading.Event() + stop = threading.Event() + + t = self._make_server_thread(sock_path, ready, stop, debug=1) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + + try: + self._send_command(sock_path, {"command": "GET", "client": "test"}) + except Exception: + pass + finally: + t.join(timeout=0.5) + + def test_duplicate_device_name_skipped(self, tmp_path, capsys): + """Test that duplicate UPS names are skipped.""" + sock_path = str(tmp_path / "server_dup.sock") + ready = threading.Event() + + from unittest.mock import MagicMock, patch + from bmspy.server import main as server_main + from 
bmspy.classes import BMSScalarField + from bmspy.jbd_bms import JBDBMS + + fake_bms = JBDBMS() + fake_bms.bms_voltage_total_volts = BMSScalarField( + help="Voltage", raw_value=52.0, value="52.00", units="V" + ) + + def _do_main_dup(): + argv = ["bmspy-server", "--socket", sock_path, + "--device", "myups:/dev/ttyUSB0", + "--device", "myups:/dev/ttyUSB1"] # duplicate name + + original_listen = socket_module.socket.listen + + def _patched_listen(self, backlog=1): + result = original_listen(self, backlog) + ready.set() + return result + + with patch("sys.argv", argv), \ + patch("bmspy.server.signal.signal"), \ + patch("bmspy.server.initialise_serial", return_value=MagicMock()), \ + patch("bmspy.server.collect_data", return_value=fake_bms), \ + patch("bmspy.server.time.sleep"), \ + patch("bmspy.server.os.path.isdir", return_value=True), \ + patch("bmspy.server.os.path.exists", return_value=False), \ + patch.object(socket_module.socket, "listen", _patched_listen): + try: + server_main() + except (SystemExit, KeyboardInterrupt, OSError): + pass + + t = threading.Thread(target=_do_main_dup, daemon=True) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + # Server should have started with one device + response = self._send_command(sock_path, {"command": "GET", "client": "test"}) + assert "myups" in response + t.join(timeout=0.5) + + def test_socket_dir_created_if_missing(self, tmp_path): + """Test that socket dir is created when it doesn't exist.""" + sock_path = str(tmp_path / "server_mkdir.sock") + ready = threading.Event() + + from unittest.mock import MagicMock, patch, call + from bmspy.server import main as server_main + from bmspy.jbd_bms import JBDBMS + + fake_bms = JBDBMS() + makedirs_called = [] + + def _do_main_mkdir(): + argv = ["bmspy-server", "--socket", sock_path, "--device", "/dev/ttyUSB0"] + + original_listen = socket_module.socket.listen + + def _patched_listen(self, backlog=1): + result = original_listen(self, backlog) + ready.set() + 
return result + + def _patched_makedirs(path, exist_ok=False): + makedirs_called.append(path) + # Don't call actual makedirs to avoid recursion - socket dir is already tmp_path + + with patch("sys.argv", argv), \ + patch("bmspy.server.signal.signal"), \ + patch("bmspy.server.initialise_serial", return_value=MagicMock()), \ + patch("bmspy.server.collect_data", return_value=fake_bms), \ + patch("bmspy.server.time.sleep"), \ + patch("bmspy.server.os.path.exists", return_value=False), \ + patch("bmspy.server.os.path.isdir", return_value=False), \ + patch("bmspy.server.os.makedirs", side_effect=_patched_makedirs), \ + patch.object(socket_module.socket, "listen", _patched_listen): + try: + server_main() + except (SystemExit, KeyboardInterrupt, OSError): + pass + + t = threading.Thread(target=_do_main_mkdir, daemon=True) + t.start() + ready.wait(timeout=5) + import time as _t + _t.sleep(0.2) + try: + self._send_command(sock_path, {"command": "REGISTER", "client": "test"}) + except Exception: + pass + t.join(timeout=0.5) + assert len(makedirs_called) > 0 + + def test_debug_3_logs_startup(self, tmp_path, capsys): + """Test debug=3 logs 'starting up' message.""" + sock_path = str(tmp_path / "server_dbg3.sock") + ready = threading.Event() + stop = threading.Event() + + t = self._make_server_thread(sock_path, ready, stop, debug=3) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + + try: + self._send_command(sock_path, {"command": "REGISTER", "client": "test"}) + except Exception: + pass + finally: + t.join(timeout=0.5) + captured = capsys.readouterr() + # debug>2 triggers "starting up" and "waiting for connection" + assert "starting up" in captured.out.lower() or "waiting" in captured.out.lower() + + def test_socket_already_exists_raises(self, tmp_path): + """Test that server raises OSError if socket already exists.""" + sock_path = str(tmp_path / "existing.sock") + + from unittest.mock import MagicMock, patch + from bmspy.server import main as 
server_main + + with patch("sys.argv", ["bmspy-server", "--socket", sock_path, "--device", "/dev/ttyUSB0"]), \ + patch("bmspy.server.signal.signal"), \ + patch("bmspy.server.initialise_serial", return_value=MagicMock()), \ + patch("bmspy.server.os.path.isdir", return_value=True), \ + patch("bmspy.server.os.path.exists", return_value=True): + with pytest.raises(OSError, match="already exists"): + server_main() + + def test_deregister_nonexistent_client_no_error(self, tmp_path): + """Test DEREGISTER for a client that was never registered (KeyError suppressed).""" + sock_path = str(tmp_path / "server_noerr.sock") + ready = threading.Event() + stop = threading.Event() + + t = self._make_server_thread(sock_path, ready, stop) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + + try: + # Deregister without first registering + response = self._send_command(sock_path, {"command": "DEREGISTER", "client": "ghost"}) + assert response.get("status") == "DEREGISTERED" + finally: + t.join(timeout=0.5) + + def test_keyboard_interrupt_closes_connection(self, tmp_path): + """Test KeyboardInterrupt handler closes connection when connection is active.""" + sock_path = str(tmp_path / "server_kbi.sock") + ready = threading.Event() + + from unittest.mock import MagicMock, patch + from bmspy.server import main as server_main, read_request as original_read_request + from bmspy.jbd_bms import JBDBMS + + fake_bms = JBDBMS() + call_count = [0] + + def _do_main_kbi(): + argv = ["bmspy-server", "--socket", sock_path, "--device", "/dev/ttyUSB0"] + + original_listen = socket_module.socket.listen + + def _patched_listen(self, backlog=1): + result = original_listen(self, backlog) + ready.set() + return result + + def _patched_read_request(conn, debug=0): + call_count[0] += 1 + if call_count[0] >= 2: + raise KeyboardInterrupt("test interrupt") + return original_read_request(conn, debug) + + with patch("sys.argv", argv), \ + patch("bmspy.server.signal.signal"), \ + 
patch("bmspy.server.initialise_serial", return_value=MagicMock()), \ + patch("bmspy.server.collect_data", return_value=fake_bms), \ + patch("bmspy.server.time.sleep"), \ + patch("bmspy.server.os.path.isdir", return_value=True), \ + patch("bmspy.server.os.path.exists", return_value=False), \ + patch("bmspy.server.read_request", side_effect=_patched_read_request), \ + patch.object(socket_module.socket, "listen", _patched_listen): + try: + server_main() + except SystemExit: + pass + + t = threading.Thread(target=_do_main_kbi, daemon=True) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + # Send first request to register call_count[0] = 1 + try: + self._send_command(sock_path, {"command": "REGISTER", "client": "test"}) + except Exception: + pass + _t.sleep(0.1) + # Send second request to trigger KeyboardInterrupt with active connection + try: + self._send_command(sock_path, {"command": "REGISTER", "client": "test2"}) + except Exception: + pass + t.join(timeout=2) + + def test_socket_read_error_logs_and_continues(self, tmp_path, capsys): + """Test that read_request errors are caught and logged.""" + sock_path = str(tmp_path / "server_err.sock") + ready = threading.Event() + stop = threading.Event() + + t = self._make_server_thread(sock_path, ready, stop) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + + try: + # Connect but send garbage that will cause read_request to fail + import socket as _s + sock = _s.socket(_s.AF_UNIX, _s.SOCK_STREAM) + for _ in range(20): + try: + sock.connect(sock_path) + break + except (OSError, ConnectionRefusedError): + _t.sleep(0.05) + # Send only 2 bytes (incomplete length header) + sock.sendall(b"\x00\x00") + sock.close() + _t.sleep(0.1) # Give server time to process + finally: + t.join(timeout=0.5) + + def test_root_user_socket_dir_created(self, tmp_path, capsys): + """Test root user path when socket dir doesn't exist (chown/chmod triggered).""" + sock_path = str(tmp_path / 
"server_root_mkdir.sock") + ready = threading.Event() + + from unittest.mock import MagicMock, patch + from bmspy.server import main as server_main + from bmspy.jbd_bms import JBDBMS + + fake_bms = JBDBMS() + + def _do_main(): + argv = ["bmspy-server", "--socket", sock_path, "--device", "/dev/ttyUSB0", "-v", "-v"] + + original_listen = socket_module.socket.listen + + def _patched_listen(self, backlog=1): + result = original_listen(self, backlog) + ready.set() + return result + + mock_pwd_module = MagicMock() + mock_pwd_module.getpwnam.return_value = [None, None, 65534] + mock_pwd_module.getpwuid.return_value = ["nobody"] + mock_grp_module = MagicMock() + mock_grp_module.getgrnam.return_value = [None, None, 65534] + mock_grp_module.getgrgid.return_value = ["dialout"] + + import sys as _sys + + with patch("sys.argv", argv), \ + patch("bmspy.server.signal.signal"), \ + patch("bmspy.server.initialise_serial", return_value=MagicMock()), \ + patch("bmspy.server.collect_data", return_value=fake_bms), \ + patch("bmspy.server.time.sleep"), \ + patch("bmspy.server.os.path.isdir", return_value=False), \ + patch("bmspy.server.os.makedirs"), \ + patch("bmspy.server.os.chown"), \ + patch("bmspy.server.os.chmod"), \ + patch("bmspy.server.os.path.exists", return_value=False), \ + patch("bmspy.server.os.getuid", return_value=0), \ + patch("bmspy.server.os.getgid", return_value=0), \ + patch("bmspy.server.os.setuid"), \ + patch("bmspy.server.os.setgid"), \ + patch("bmspy.server.os.umask", return_value=0o022), \ + patch.dict(_sys.modules, {"pwd": mock_pwd_module, "grp": mock_grp_module}), \ + patch.object(socket_module.socket, "listen", _patched_listen): + try: + server_main() + except (SystemExit, KeyboardInterrupt, OSError): + pass + + t = threading.Thread(target=_do_main, daemon=True) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + try: + self._send_command(sock_path, {"command": "REGISTER", "client": "test"}) + except Exception: + pass + 
+ t.join(timeout=0.5) + + def test_root_user_setgid_error(self, tmp_path, capsys): + """Test root user path when setgid raises OSError.""" + sock_path = str(tmp_path / "server_setgid_err.sock") + ready = threading.Event() + + from unittest.mock import MagicMock, patch + from bmspy.server import main as server_main + from bmspy.jbd_bms import JBDBMS + + fake_bms = JBDBMS() + + def _do_main(): + argv = ["bmspy-server", "--socket", sock_path, "--device", "/dev/ttyUSB0"] + + original_listen = socket_module.socket.listen + + def _patched_listen(self, backlog=1): + result = original_listen(self, backlog) + ready.set() + return result + + mock_pwd_module = MagicMock() + mock_pwd_module.getpwnam.return_value = [None, None, 65534] + mock_pwd_module.getpwuid.return_value = ["nobody"] + mock_grp_module = MagicMock() + mock_grp_module.getgrnam.return_value = [None, None, 65534] + mock_grp_module.getgrgid.return_value = ["dialout"] + + import sys as _sys + + with patch("sys.argv", argv), \ + patch("bmspy.server.signal.signal"), \ + patch("bmspy.server.initialise_serial", return_value=MagicMock()), \ + patch("bmspy.server.collect_data", return_value=JBDBMS()), \ + patch("bmspy.server.time.sleep"), \ + patch("bmspy.server.os.path.isdir", return_value=True), \ + patch("bmspy.server.os.path.exists", return_value=False), \ + patch("bmspy.server.os.getuid", return_value=0), \ + patch("bmspy.server.os.getgid", return_value=0), \ + patch("bmspy.server.os.setgid", side_effect=OSError("cannot set gid")), \ + patch("bmspy.server.os.setuid", side_effect=OSError("cannot set uid")), \ + patch("bmspy.server.os.umask", return_value=0o022), \ + patch.dict(_sys.modules, {"pwd": mock_pwd_module, "grp": mock_grp_module}), \ + patch.object(socket_module.socket, "listen", _patched_listen): + try: + server_main() + except (SystemExit, KeyboardInterrupt, OSError): + pass + + t = threading.Thread(target=_do_main, daemon=True) + t.start() + ready.wait(timeout=2) + import time as _t 
+ _t.sleep(0.2) + try: + self._send_command(sock_path, {"command": "REGISTER", "client": "test"}) + except Exception: + pass + t.join(timeout=0.5) + captured = capsys.readouterr() + # Should log errors about setgid/setuid + assert "gid" in captured.out.lower() or "uid" in captured.out.lower() + + def test_root_user_uid_gid_handling(self, tmp_path, capsys): + """Test the uid==0 path for privilege dropping.""" + sock_path = str(tmp_path / "server_root.sock") + ready = threading.Event() + + from unittest.mock import MagicMock, patch + from bmspy.server import main as server_main + from bmspy.jbd_bms import JBDBMS + + fake_bms = JBDBMS() + + def _do_main_root(): + argv = ["bmspy-server", "--socket", sock_path, "--device", "/dev/ttyUSB0"] + + original_listen = socket_module.socket.listen + + def _patched_listen(self, backlog=1): + result = original_listen(self, backlog) + ready.set() + return result + + mock_pwd_module = MagicMock() + mock_pwd_module.getpwnam.return_value = [None, None, 65534] # nobody uid + mock_pwd_module.getpwuid.return_value = ["nobody"] + mock_grp_module = MagicMock() + mock_grp_module.getgrnam.return_value = [None, None, 65534] # dialout gid + mock_grp_module.getgrgid.return_value = ["dialout"] + + import sys as _sys + + with patch("sys.argv", argv), \ + patch("bmspy.server.signal.signal"), \ + patch("bmspy.server.initialise_serial", return_value=MagicMock()), \ + patch("bmspy.server.collect_data", return_value=fake_bms), \ + patch("bmspy.server.time.sleep"), \ + patch("bmspy.server.os.path.isdir", return_value=True), \ + patch("bmspy.server.os.path.exists", return_value=False), \ + patch("bmspy.server.os.getuid", return_value=0), \ + patch("bmspy.server.os.getgid", return_value=0), \ + patch("bmspy.server.os.setuid"), \ + patch("bmspy.server.os.setgid"), \ + patch("bmspy.server.os.umask", return_value=0o022), \ + patch.dict(_sys.modules, {"pwd": mock_pwd_module, "grp": mock_grp_module}), \ + patch.object(socket_module.socket, "listen", 
_patched_listen): + try: + server_main() + except (SystemExit, KeyboardInterrupt, OSError): + pass + + t = threading.Thread(target=_do_main_root, daemon=True) + t.start() + ready.wait(timeout=2) + import time as _t + _t.sleep(0.2) + try: + self._send_command(sock_path, {"command": "REGISTER", "client": "test"}) + except Exception: + pass + t.join(timeout=0.5) + + def test_invalid_command_breaks_loop(self, tmp_path, capsys): + """Test that an invalid command logs an error.""" + sock_path = str(tmp_path / "server_invalid.sock") + ready = threading.Event() + stop = threading.Event() + + t = self._make_server_thread(sock_path, ready, stop) + t.start() + ready.wait(timeout=2) + import time as _time + _time.sleep(0.2) + + try: + # Send invalid command - note: server breaks on invalid, so this may fail + import socket as _s, struct, json + sock = _s.socket(_s.AF_UNIX, _s.SOCK_STREAM) + for _ in range(20): + try: + sock.connect(sock_path) + break + except (OSError, ConnectionRefusedError): + _time.sleep(0.05) + payload = json.dumps({"command": "INVALID", "client": "test"}).encode() + sock.sendall(struct.pack("!I", len(payload)) + payload) + sock.close() + except Exception: + pass + finally: + t.join(timeout=0.5) + + +class TestSendResponseDebug: + def test_debug_3_logs_sending(self, capsys): + srv, cli = socket_module.socketpair() + try: + send_response(srv, {"status": "OK"}, "test", debug=3) + _recv_framed(cli) + finally: + srv.close() + cli.close() + captured = capsys.readouterr() + assert "sending" in captured.out + + def test_debug_5_logs_length(self, capsys): + srv, cli = socket_module.socketpair() + try: + send_response(srv, {"status": "OK"}, "test", debug=5) + _recv_framed(cli) + finally: + srv.close() + cli.close() + captured = capsys.readouterr() + assert "length" in captured.out + + def test_debug_4_logs_response(self, capsys): + srv, cli = socket_module.socketpair() + try: + send_response(srv, {"status": "OK"}, "test", debug=4) + _recv_framed(cli) + finally: + 
srv.close() + cli.close() + captured = capsys.readouterr() + assert "outgoing response" in captured.out diff --git a/tests/test_ups.py b/tests/test_ups.py new file mode 100644 index 0000000..c7b0c8e --- /dev/null +++ b/tests/test_ups.py @@ -0,0 +1,490 @@ +import pytest +import smtplib +from unittest.mock import patch, MagicMock + +import bmspy.ups as ups_mod +from bmspy.classes import BMSScalarField, BMSMultiField, BMSInfoField, UPS +from bmspy.ups import _get_field_value, handle_shutdown, handle_email + + +# --------------------------------------------------------------------------- +# Fixtures: reset module-level globals between tests +# --------------------------------------------------------------------------- + +@pytest.fixture(autouse=True) +def _reset_globals(): + ups_mod.scheduled_shutdown = False + ups_mod.critical_sent = False + ups_mod.warning_sent = False + ups_mod.alert_sent = False + yield + ups_mod.scheduled_shutdown = False + ups_mod.critical_sent = False + ups_mod.warning_sent = False + ups_mod.alert_sent = False + + +# --------------------------------------------------------------------------- +# _get_field_value +# --------------------------------------------------------------------------- + +class TestGetFieldValue: + def _ups(self): + return UPS.from_dict({ + "bms_current_amps": { + "help": "Current", + "raw_value": -2.5, + "value": "-2.50", + "units": "A", + }, + "bms_capacity_charge_ratio": { + "help": "Percent Charge", + "raw_value": 0.75, + "value": "0.75", + "units": "%", + }, + "bms_date": { + "help": "Date", + "info": "2023-01-15", + }, + "bms_cells": { + "help": "Cells", + "label": "cell", + "raw_values": {1: 3.6}, + "values": {1: "3.600"}, + "units": "V", + }, + }) + + def test_found_scalar_field(self): + ups = self._ups() + assert _get_field_value(ups, "bms_current_amps") == pytest.approx(-2.5) + + def test_found_charge_ratio(self): + ups = self._ups() + assert _get_field_value(ups, "bms_capacity_charge_ratio") == pytest.approx(0.75) 
+ + def test_field_not_found_returns_none(self): + ups = self._ups() + assert _get_field_value(ups, "nonexistent_field") is None + + def test_info_field_returns_none(self): + # BMSInfoField is not a scalar, should return None + ups = self._ups() + assert _get_field_value(ups, "bms_date") is None + + def test_multi_field_returns_none(self): + # BMSMultiField is not a scalar, should return None + ups = self._ups() + assert _get_field_value(ups, "bms_cells") is None + + +# --------------------------------------------------------------------------- +# handle_shutdown +# --------------------------------------------------------------------------- + +class TestHandleShutdown: + def test_shutdown_calls_os_system(self): + with patch("bmspy.ups.os.system") as mock_sys: + handle_shutdown(action="shutdown", delay=5) + mock_sys.assert_called_once_with("/sbin/shutdown 5") + + def test_shutdown_sets_scheduled_flag(self): + with patch("bmspy.ups.os.system"): + handle_shutdown(action="shutdown", delay=5) + assert ups_mod.scheduled_shutdown is not False + + def test_shutdown_does_not_reschedule_if_already_scheduled(self): + ups_mod.scheduled_shutdown = 9999999999.0 + with patch("bmspy.ups.os.system") as mock_sys: + handle_shutdown(action="shutdown", delay=5) + mock_sys.assert_not_called() + + def test_cancel_calls_os_system(self): + with patch("bmspy.ups.os.system") as mock_sys: + handle_shutdown(action="cancel") + mock_sys.assert_called_once_with("/sbin/shutdown -c") + + def test_cancel_does_not_set_flag(self): + with patch("bmspy.ups.os.system"): + handle_shutdown(action="cancel") + assert ups_mod.scheduled_shutdown is False + + +# --------------------------------------------------------------------------- +# handle_email +# --------------------------------------------------------------------------- + +class TestHandleEmail: + def _mock_smtp(self): + mock_server = MagicMock() + mock_smtp_class = MagicMock() + mock_smtp_class.return_value.__enter__ = 
MagicMock(return_value=mock_server) + mock_smtp_class.return_value.__exit__ = MagicMock(return_value=False) + return mock_smtp_class, mock_server + + def test_basic_send(self): + mock_smtp_class, mock_server = self._mock_smtp() + with patch("bmspy.ups.smtplib.SMTP", mock_smtp_class): + handle_email( + text="test message", + level="Alert", + recipient="user@example.com", + ) + mock_server.sendmail.assert_called_once() + + def test_ssl_port_triggers_starttls(self): + mock_smtp_class, mock_server = self._mock_smtp() + with patch("bmspy.ups.smtplib.SMTP", mock_smtp_class), \ + patch("bmspy.ups.ssl.create_default_context") as mock_ssl: + handle_email( + text="test", + level="Alert", + recipient="user@example.com", + port=465, + ) + mock_server.starttls.assert_called_once() + + def test_port_587_also_triggers_starttls(self): + mock_smtp_class, mock_server = self._mock_smtp() + with patch("bmspy.ups.smtplib.SMTP", mock_smtp_class), \ + patch("bmspy.ups.ssl.create_default_context"): + handle_email( + text="test", + level="Alert", + recipient="user@example.com", + port=587, + ) + mock_server.starttls.assert_called_once() + + def test_no_ssl_no_starttls(self): + mock_smtp_class, mock_server = self._mock_smtp() + with patch("bmspy.ups.smtplib.SMTP", mock_smtp_class): + handle_email( + text="test", + level="Alert", + recipient="user@example.com", + port=25, + ) + mock_server.starttls.assert_not_called() + + def test_with_credentials_calls_login(self): + mock_smtp_class, mock_server = self._mock_smtp() + with patch("bmspy.ups.smtplib.SMTP", mock_smtp_class): + handle_email( + text="test", + level="Alert", + recipient="user@example.com", + mailuser="user", + mailpass="pass", + ) + mock_server.login.assert_called_once_with("user", "pass") + + def test_without_credentials_no_login(self): + mock_smtp_class, mock_server = self._mock_smtp() + with patch("bmspy.ups.smtplib.SMTP", mock_smtp_class): + handle_email( + text="test", + level="Alert", + recipient="user@example.com", + ) + 
mock_server.login.assert_not_called() + + def test_recipient_without_at_gets_hostname_appended(self): + mock_smtp_class, mock_server = self._mock_smtp() + with patch("bmspy.ups.smtplib.SMTP", mock_smtp_class), \ + patch("bmspy.ups.socket.gethostname", return_value="myhost"): + handle_email( + text="test", + level="Alert", + recipient="root", + ) + # sendmail recipient should be root@myhost + args = mock_server.sendmail.call_args[0] + assert args[1] == "root@myhost" + + +# --------------------------------------------------------------------------- +# ups main() - comprehensive loop testing +# --------------------------------------------------------------------------- + +def _make_ups(current_amps: float, charge_ratio: float) -> UPS: + """Build a UPS with bms_current_amps and bms_capacity_charge_ratio.""" + return UPS.from_dict({ + "bms_current_amps": { + "help": "Current", + "raw_value": current_amps, + "value": str(current_amps), + "units": "A", + }, + "bms_capacity_charge_ratio": { + "help": "Charge Ratio", + "raw_value": charge_ratio, + "value": str(charge_ratio), + "units": "%", + }, + }) + + +class TestUpsMain: + """Test ups.main() loop behavior by running a limited number of iterations.""" + + def _run_main_with_data(self, data_sequence, argv=None, extra_patches=None): + """Run main() with a sequence of UPS data, stopping when data runs out.""" + call_count = 0 + + def _read_data(*args, **kwargs): + nonlocal call_count + if call_count >= len(data_sequence): + raise StopIteration("done") + result = data_sequence[call_count] + call_count += 1 + return result + + patches = { + "bmspy.ups.client.read_data": _read_data, + "bmspy.ups.client.handle_registration": MagicMock(), + "bmspy.ups.time.sleep": MagicMock(), + "bmspy.ups.os.system": MagicMock(), + } + if extra_patches: + patches.update(extra_patches) + + with patch("sys.argv", ["bmspy-ups"] + (argv or [])): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=_read_data), 
\ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.os.system"): + ups_mod.main() + return call_count + + def _make_data(self, current_amps, charge_ratio): + return {"testups": _make_ups(current_amps, charge_ratio)} + + def test_main_history_pruned_at_10(self): + """History is pruned to max 10 items.""" + # Send 15 readings to ensure pruning happens + data_seq = [self._make_data(0.0, 0.80)] * 15 + + with patch("sys.argv", ["bmspy-ups"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown"), \ + patch("bmspy.ups.handle_email"): + ups_mod.main() + + def test_main_runs_with_not_enough_history(self): + """With < 4 readings, should print 'not enough readings' and continue.""" + data = [self._make_data(0.0, 0.95)] * 5 + self._run_main_with_data(data) + + def test_main_debug_2_prints_not_enough_readings(self, capsys): + data = [self._make_data(0.0, 0.95)] * 5 + with patch("sys.argv", ["bmspy-ups", "-v", "-v"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data + [StopIteration]),\ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.os.system"): + ups_mod.main() + + def test_main_below_critical_threshold_triggers_shutdown(self): + """When charge drops below critical threshold, shutdown should be triggered.""" + # Need 5+ readings (>3 in history) to trigger comparisons + ups_mod.scheduled_shutdown = False + ups_mod.critical_sent = False + data_seq = [] + # Populate history with 5 readings all at 20% (below 30% critical) + for _ in range(5): + data_seq.append(self._make_data(0.0, 0.20)) + + mock_shutdown = MagicMock() + mock_email = MagicMock() + with patch("sys.argv", ["bmspy-ups", "--critical", "30"]): + with 
pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown", mock_shutdown), \ + patch("bmspy.ups.handle_email", mock_email): + ups_mod.main() + mock_shutdown.assert_called() + + def test_main_below_warning_threshold_sends_email(self): + """When charge drops below warning threshold, email alert should be sent.""" + ups_mod.scheduled_shutdown = False + ups_mod.warning_sent = False + data_seq = [] + # 5 readings at 60% (below 75% warning, above 30% critical) + for _ in range(5): + data_seq.append(self._make_data(0.0, 0.60)) + + mock_email = MagicMock() + with patch("sys.argv", ["bmspy-ups", "--warning", "75", "--critical", "30"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown"), \ + patch("bmspy.ups.handle_email", mock_email): + ups_mod.main() + mock_email.assert_called() + + def test_main_discharge_alert(self): + """When current goes negative, discharge alert should be sent. + + Discharge alert requires: current < 0 AND h1 < 0 AND h2 >= 0. + After 5 reads with seq [+1, -1, +1, +1, -1]: + h=[+1,-1,+1,+1,-1], current=-1, h1=-1, h2=+1 → triggers alert. + """ + ups_mod.alert_sent = False + data_seq = [ + self._make_data(1.0, 0.90), # r1: +1 + self._make_data(-1.0, 0.89), # r2: -1 (will be h1) + self._make_data(1.0, 0.90), # r3: +1 (will be h2) + self._make_data(1.0, 0.90), # r4: +1 + self._make_data(-1.0, 0.88), # r5: -1 (current) → alert! 
+ ] + + mock_email = MagicMock() + with patch("sys.argv", ["bmspy-ups"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown"), \ + patch("bmspy.ups.handle_email", mock_email): + ups_mod.main() + mock_email.assert_called() + + def test_main_power_regained_alert(self): + """When current goes from negative to positive, recovery alert sent. + + Recovery requires: current >= 0 AND h1 >= 0 AND h2 < 0. + After 5 reads with seq [-1, +1, -1, +1, +1]: + h=[-1,+1,-1,+1,+1], current=+1, h1=+1, h2=-1 → recovery! + """ + ups_mod.alert_sent = True + ups_mod.scheduled_shutdown = False + data_seq = [ + self._make_data(-1.0, 0.80), # r1: -1 + self._make_data(1.0, 0.81), # r2: +1 (will be h1) + self._make_data(-1.0, 0.79), # r3: -1 (will be h2) + self._make_data(1.0, 0.81), # r4: +1 + self._make_data(1.0, 0.82), # r5: +1 (current) → recovery! 
+ ] + + mock_email = MagicMock() + mock_shutdown = MagicMock() + with patch("sys.argv", ["bmspy-ups"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown", mock_shutdown), \ + patch("bmspy.ups.handle_email", mock_email): + ups_mod.main() + mock_email.assert_called() + + def test_main_not_enough_history_debug_2(self, capsys): + """With debug=2 and < 4 readings, prints 'not enough readings'.""" + data_seq = [self._make_data(0.0, 0.95)] * 5 + with patch("sys.argv", ["bmspy-ups", "-v", "-v"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown"), \ + patch("bmspy.ups.handle_email"): + ups_mod.main() + captured = capsys.readouterr() + assert "not enough readings" in captured.out.lower() + + def test_main_below_warning_debug_1(self, capsys): + """With debug=1, warning message is printed.""" + ups_mod.warning_sent = False + data_seq = [self._make_data(0.0, 0.60)] * 5 + with patch("sys.argv", ["bmspy-ups", "-v", "--warning", "75", "--critical", "30"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown"), \ + patch("bmspy.ups.handle_email"): + ups_mod.main() + captured = capsys.readouterr() + assert "warning" in captured.out.lower() + + def test_main_discharge_alert_debug_1(self, capsys): + """With debug=1, discharge alert message is printed.""" + ups_mod.alert_sent = False + data_seq = [ + self._make_data(1.0, 0.90), + self._make_data(-1.0, 0.89), + self._make_data(1.0, 0.90), + 
self._make_data(1.0, 0.90), + self._make_data(-1.0, 0.88), + ] + with patch("sys.argv", ["bmspy-ups", "-v"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown"), \ + patch("bmspy.ups.handle_email"): + ups_mod.main() + captured = capsys.readouterr() + assert "discharging" in captured.out.lower() + + def test_main_power_regained_debug_1(self, capsys): + """With debug=1, power regained message is printed.""" + ups_mod.alert_sent = True + data_seq = [ + self._make_data(-1.0, 0.80), + self._make_data(1.0, 0.81), + self._make_data(-1.0, 0.79), + self._make_data(1.0, 0.81), + self._make_data(1.0, 0.82), + ] + with patch("sys.argv", ["bmspy-ups", "-v"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown"), \ + patch("bmspy.ups.handle_email"): + ups_mod.main() + captured = capsys.readouterr() + assert "power regained" in captured.out.lower() + + def test_main_debug_2_prints_current_and_capacity(self, capsys): + """With debug=2, current and capacity are printed.""" + data_seq = [self._make_data(1.0, 0.80)] * 5 + with patch("sys.argv", ["bmspy-ups", "-v", "-v"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown"), \ + patch("bmspy.ups.handle_email"): + ups_mod.main() + captured = capsys.readouterr() + assert "current" in captured.out.lower() or "capacity" in captured.out.lower() + + def test_main_debug_1_prints_thresholds(self, capsys): + """With debug=1 and below threshold, 
threshold messages are printed.""" + ups_mod.critical_sent = False + data_seq = [self._make_data(0.0, 0.20)] * 5 + with patch("sys.argv", ["bmspy-ups", "-v", "--critical", "30"]): + with pytest.raises(StopIteration): + with patch("bmspy.ups.client.read_data", side_effect=data_seq + [StopIteration("done")]), \ + patch("bmspy.ups.client.handle_registration"), \ + patch("bmspy.ups.time.sleep"), \ + patch("bmspy.ups.handle_shutdown"), \ + patch("bmspy.ups.handle_email"): + ups_mod.main() + captured = capsys.readouterr() + assert "critical" in captured.out.lower() or "threshold" in captured.out.lower() diff --git a/tests/test_utilities.py b/tests/test_utilities.py new file mode 100644 index 0000000..910cab9 --- /dev/null +++ b/tests/test_utilities.py @@ -0,0 +1,26 @@ +from bmspy.utilities import debugger + + +class TestDebugger: + def test_prints_message(self, capsys): + debugger("hello world") + captured = capsys.readouterr() + assert "hello world" in captured.out + + def test_includes_timestamp(self, capsys): + debugger("test") + captured = capsys.readouterr() + # timestamp is prepended + assert "test" in captured.out + assert len(captured.out.strip()) > len("test") + + def test_pretty_true(self, capsys): + debugger({"key": "value"}, pretty=True) + captured = capsys.readouterr() + # pretty-printed output includes the data key + assert "key" in captured.out + + def test_pretty_false_no_pprint(self, capsys): + debugger("simple message", pretty=False) + captured = capsys.readouterr() + assert "simple message" in captured.out