feat: pydantic (#182)
* feat: pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> Squashed commit of the following: commit f7cdffc2c124d1f2a4517588364b818795bc729d Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 22:32:27 2022 +0300 docs: canonical representation Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f0bffb4118d2936fa2f7ff759d218f706168fd61 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 22:04:33 2022 +0300 docs: remove duplicate deprecated module warnings Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit a9bc2cee634503d41ee257c039817fca0de164d8 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 22:02:54 2022 +0300 docs: fix grammar Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit 8b4f3db9e2c23c3d1ba68c0b3b1f0ea55e2972f5 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri Aug 12 15:43:02 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 685e43d77d23e20f9f8272aefe29405d3249ef68 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:35:59 2022 +0300 test: exclude import testing Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f69bcd2759df7fc3ea16421947316191832fcfcb Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:33:48 2022 +0300 docs: simplify specversion documentation Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6199278600d60ab3f36dd45f93e8cc3ca03f88b5 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:33:14 2022 +0300 docs: specversion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 44de28b6d2ce9ae4c0cfff47967a86d9e2da36af Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:30:45 2022 +0300 refactor: optimize imports Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 4a6be338cc29e86cde7c2ce224d5b0127e142af9 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:29:28 2022 +0300 refactor: optimize imports Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 8615073ee4617895c41e097bdc4ecb868f8d0eb5 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:24:03 2022 +0300 refactor: remove anyt Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f03d23b39b2a8554321c9b71cc2a988a7c26d1f6 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:22:15 2022 +0300 feat: import is_binary and is_structured from converts module Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit b920645df88676a74341ba32ec4dd914855b5aa2 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:21:49 2022 +0300 style: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 0dbd63e713cb26fc951c205ad740f166d76df84d Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:18:50 2022 +0300 docs: cleanup license Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 9fdef9480af3e3af277af6df4ea7ccff6a98a02a Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:41:52 2022 +0300 build: fixate python version Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit de47cc8412984cf22a75044ef63daa1c23cb4b18 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri Aug 12 15:23:31 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, 
see https://pre-commit.ci commit 7be086530bd19748867a221313a221284b1679bb Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:23:24 2022 +0300 docs: improve best effort serialization docs Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit a55d60676e15ce83867be9f8c72f44d03d559773 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:22:49 2022 +0300 docs: fix grammar Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit 4d68ec402dbe3e4bac08fcdf821e07b49b321541 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:22:36 2022 +0300 docs: remove uneeded spacing Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit 9b3537e89f2bd3cabab21373266fc7c3f113afcf Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri Aug 12 15:17:32 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 49635fe180b9ebdf49d77536869ee2d3601c8324 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:15:37 2022 +0300 docs: incompatible arguments error Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 909b72e612cbabe0bbf104a36df8d98b475bff30 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 18:14:24 2022 +0300 docs: pydantic not installed exception Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 141f9090f490757dec6453aa22f207329a616877 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri Aug 12 13:57:31 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit d487124a648bd9b6bdb50f81794f2fff63e01016 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 12 16:56:46 2022 +0300 build: pin pydantic version on python 3.6 Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit a46feba840f99c5a86575d7df074798126b66ef3 Merge: 21368b547818a9
Author: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> Date: Thu Aug 11 12:28:57 2022 +0300 Merge branch 'main' into feature/pydantic commit 21368b5e123664810a03f19f06d3255be79b9e2e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Wed Aug 10 20:26:52 2022 +0300 feat: raise indicative error on non-installed pydantic feature Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 65745f351856b82fc9e0781307cb2d597bea7f26 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Wed Aug 10 20:26:36 2022 +0300 feat: pydantic feature not installed exception Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit ab218e7568d9c9ed51e74edfc30f2f820d9eb4cf Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon Aug 8 22:10:56 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit a026d319daa39fad7621affb1deeef6b6d7793e1 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 01:10:16 2022 +0300 fix: test int correctly Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit c49afe41c071be8f6052b6198b419bb57609e26c Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 01:08:57 2022 +0300 test: incompatible types Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit fb74ae39a255adf0f23fe4d0920d902aedf8dd11 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon Aug 8 21:38:12 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 9300c005a6647704601a48b92e591e371c2f3737 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 00:37:05 2022 +0300 test: backwards compatability with calling Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 15ccc350b5d8154dd3bce1af9de2a2fa9a803996 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 00:25:53 2022 +0300 test: test is structured backwards compatability Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit bfe441866a4a9371516114214f19649d445756ef Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 00:24:42 2022 +0300 test: improve is binary test Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit aa9a69dd1690d3f02a9fb7932a23756874548702 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 00:13:51 2022 +0300 stlye: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit fb81f310124a7711a3145df0a69282441f7c1e7c Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 00:13:00 2022 +0300 fix: remove code duplication Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 650dd1634cd3df74d56cd35faac0528067245832 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 00:11:56 2022 +0300 docs: explain why dependency what it is Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit b2780791314e46a918848de2aae9e778927a5441 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 00:10:15 2022 +0300 build: explicitly specify pydantic version Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 29e13ca9a67f39eefaad6ed1ca82317927ad8123 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 00:05:54 2022 +0300 docs: update example Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 42a4f016e5377041ba60bf631f4c413793fcf188 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Aug 9 00:04:59 2022 
+0300 docs: init function Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit e01c2b707473cf7fe1c56124d97cbd95da3ef10e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 23:58:10 2022 +0300 docs: explain why we ignore the data Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 5ddadf4e5bd158a93bdd1a2037a66e629c530126 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 23:53:32 2022 +0300 refactor: use custom exception type Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 8889abbcd233d4a244ccae4a3b56c42a1e31b24a Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 23:51:38 2022 +0300 feat: incompatible arguments error Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit a4dda34d41338cd80b3b821c9c3f5c5f5bcd5d2f Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 23:46:41 2022 +0300 refactor: use value error instead of assertion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 61f68a5f5c3ff81b46c05204af67a6fcf5a1f873 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon Aug 8 20:43:10 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 1630fc36dbf161d8a0767a332f88606cd66bc394 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 23:41:37 2022 +0300 feat: add examples to field values Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit e019c42194b8f07f45e84d49f8e463ff3c6a6faa Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 23:38:37 2022 +0300 fix: example data Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 9b48f6e7270eb253cce7b8d24561f608a717c911 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 23:04:48 2022 +0300 docs: improve pydantic cloudevent base class Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6605fa822540e0291da221fba128dc7db9c54e8b Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 23:04:22 2022 +0300 style: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 39a3ba22c0bde0c5dba919ead1f3ba82f09df033 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 23:02:47 2022 +0300 docs: dumps and loads funcions Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6d59e2902ed46cc1cdca8886e2f615d85a1b629b Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:46:17 2022 +0300 fix: pydantic dumps bugs Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 614496f5875b35e0e103a9b4f3df7e6a4a53c7cb Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:39:15 2022 +0300 Revert "refactor: make best effort serialize to json public" This reverts commit cdf7e2ebb5c92c9a7d362a5d6b2fb16aab0461a3. 
Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit cdf7e2ebb5c92c9a7d362a5d6b2fb16aab0461a3 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:35:31 2022 +0300 refactor: make best effort serialize to json public Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 75aa8436c3e6bd1865b326c5168c4e2e8ba4be27 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:33:49 2022 +0300 feat: add args and kwargs to best effort serialize to json Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit e74ae8149280cbe7d56f11d1458af8bec5a9e37e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:32:14 2022 +0300 test: pydantic json event regression bug Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 9f2e0c6e962b55f8a0683ee936b8a443ddb533c3 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:23:46 2022 +0300 perf: use http event for ce_json_* functions Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 8af3ed1c48b278b14cdd127ba06c1f653bd3c4ba Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:20:01 2022 +0300 refactor: _best_effort_serialize_to_json type information also includes docs Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 20a4e0a1fabbd6d59d371d7340d93d1c01f732b0 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:13:35 2022 +0300 refactor: rename marshaller functions Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 9d7da629b64d84b0e99fffe306680ec023b1c39b Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:06:20 2022 +0300 fix: bad type information Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit b3f5bbc573baea1127c1390b1291956f43fba183 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:05:03 2022 +0300 docs: add module deprecation comments Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6882ada4f2dec848c521eda3e41f72290b80748d Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:04:03 2022 +0300 docs: add module deprecation comments Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 53e6dec5c1ab8161049ad185b5fedc82090c670f Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:03:32 2022 +0300 docs: add module deprecation comments Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 169d024cfb2372003b93e7ac33c409aef5f06759 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:02:44 2022 +0300 docs: add module deprecation comments Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 3d4b0c94d7182ac444cabf85b3ccda23c7afa813 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 22:01:42 2022 +0300 refactor: use deprecation function Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 5c39cc0add47806e5bffb6550f2a762c484672ba Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 21:59:54 2022 +0300 refactor: use deprecation functions Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 064e2e8cef0c0cb41c837bfb018c037a2f83185b Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 21:57:17 2022 +0300 refactor: use deprecation functions Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6ea1e54f8ea13b3a520e83991c9b129ef47b272e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 21:54:44 2022 +0300 refactor: deprecation functions Signed-off-by: Alexander 
Tkachev <sasha64sasha@gmail.com> commit 71a06b6179b8d7142f4bd5c7690c2119d4448cb5 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 21:46:18 2022 +0300 docs: default time selection algorithm Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 3fcd085ff4ab6ec289f7c5f80ff369e03784c20e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 21:46:04 2022 +0300 docs: default id selection algorithm Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 3fdef87ef11d36945b527ad083409b895d249993 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 21:41:24 2022 +0300 docs: license Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 363679837cc7153b5cfdcb9b4aefa16d21e2c9fa Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Aug 8 21:32:39 2022 +0300 docs: improve documentation Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 53d1931387bb0b565cb1e76f5ddd5b25b0fdf002 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 23:21:45 2022 +0300 docs: conversion documentation Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 050ed7536b8797ae9f752715006bdc9d59d9b767 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 23:19:37 2022 +0300 docs: fix line length Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit bd70199a02551490f4533e773d7434af22daa711 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 23:15:05 2022 +0300 refactor: add best_effort suffix for clerefication Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 14ed5616b25a0fcf4498a5b6347865327cf66762 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 23:14:18 2022 +0300 docs: encode_attribute value Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6baf7d0726aed09b1394b8e4b36bbecafafa82d9 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 23:09:10 2022 +0300 refactor: move attributes to variable Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 3a77b1e446973d43e46db58e421323a11dde26f6 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun Aug 7 20:10:03 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 8ab108ac7221fdf1561965d37f21264558cb53da Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 23:09:54 2022 +0300 docs: _json_or_string Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit 4778c109543b7419fd443e436e32eb2d8ced4f1a Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun Aug 7 20:06:11 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 4809c75578e6b1058a69368fc8066a9056161b7a Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 23:06:03 2022 +0300 docs: from_dict better description Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit a538834fc5b49c34246c27637dd68afe1895a06b Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun Aug 7 20:04:20 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit f1d09a2dd2f1922b1226d31d6fefb6b9bdbc1d68 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 23:04:11 2022 +0300 docs: is_structured better description Co-authored-by: Yurii Serhiichuk 
<xSAVIKx@users.noreply.github.com> commit 4cf7559aec29d77d4aa4bb29dd7b705a4e01ad56 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun Aug 7 20:01:56 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 1efab9149991adf2afa42bcd8a38d62c932827e0 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 23:01:48 2022 +0300 docs: is_binary Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit 8e44b2462226e24fe28837758a808b68c73a91ec Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun Aug 7 19:32:36 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit f9956d4d2d9935ee4e1a5f0f96bbd87a25044120 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Aug 7 22:32:27 2022 +0300 docs: canonical representation Co-authored-by: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> commit 42578aff4d07c2e4fc5030c57077b96c72eee3a7 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Aug 6 15:11:45 2022 +0300 fix: circular dependency Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6b90af97f077d1cfae9912754092b0b6354a3a5b Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat Aug 6 12:01:59 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 789fb64fcff83593ba3c73104f2a08620b26962e Merge: 4e60121785bfe7
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Aug 6 15:02:07 2022 +0300 Merge branch 'main' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> # Conflicts: # cloudevents/abstract/event.py # cloudevents/conversion.py # cloudevents/http/event.py # cloudevents/http/http_methods.py # cloudevents/http/json_methods.py commit 4e60121514f31fdc538ae45a9ca00c2651334e4d Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri Aug 5 14:18:33 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 58c18f2237efc8765a12d7183a5889739cb7f9e7 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 5 17:14:39 2022 +0300 refactor: convert get_data and get_attributes to private member Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit c1e9105dea7ce9ea1a715d8583c32bfdc55afe2f Merge: d73311e 96c41a1
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 5 17:12:59 2022 +0300 Merge branch 'feature/abstract-cloudevent' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 96c41a15ca
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 5 17:11:12 2022 +0300 build: ignore not-implemented functions in coverage Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 4e00b55062
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Aug 5 17:09:17 2022 +0300 refactor: convert get_data and get_attributes to private member functions instead of classmethods Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit d73311e44203d9d2aabbb378a131da2f7941deb7 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:30:55 2022 +0300 test: remove unused variable Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 82aa0d41f727c61f0ec4b8cb72f08c34166653d8 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:30:24 2022 +0300 style: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f376bb51e6c70b0f2827775adaf5865d0b2ed789 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:29:42 2022 +0300 style: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 5c6a511e2e234097b1b9ae782e7010c587d1f8a9 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:26:56 2022 +0300 style: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit dbb8263e28ae2725773e7e6225a68f4aa8c30dcc Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:25:33 2022 +0300 test: add backwards compatibility tests Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 7eb8c9991cad818d282380e44a9107dc732298ca Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:22:25 2022 +0300 refactor: use direct imports Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 175084a01a851e5237413bdbed482087ee752515 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:21:51 2022 +0300 test: http event dict serialization Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit dec8244fb9d22a1b18dccde0b229c3fec6760775 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:19:49 2022 +0300 refactor: use direct imports Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit fdf4e8124eb1b35784c74f79e8e0ace6a613be9e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:16:47 2022 +0300 test: fix to_dict bug Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit adfbd40a92ccb7dd2f83472c79ef8216f548bb47 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:16:10 2022 +0300 refactor: gut util module Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 9024c83a7897e655ad363bb8ce6a9679707c9faf Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:13:07 2022 +0300 refactor: remove problematic mappings module Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit ee34c0e744d0d263efbd69750c72386db477d194 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:05:18 2022 +0300 style: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 73d35da6229d6ab3243685c2775e34abbadf3098 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:03:06 2022 +0300 fix: order confusion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 8ef16850d291f72c8f4e4aa90364a0feef491304 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:01:45 2022 +0300 fix: remove uneeded symbol Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 7238465ecd282ba63d3fa9a2b70f5a0118599771 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 02:00:34 2022 +0300 fix: circular imports Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> 
commit 618d2182aa9fba80a8dc9e88aff9612360014b76 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:59:38 2022 +0300 fix: from_dict order confusion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f4c7f729db256d403b7943e2a7a2b62a69ffdc70 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:58:42 2022 +0300 refactor: move is structured to sdk Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit e11913bfcdf2900c3045c109ee576b1a090bf5c9 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:57:25 2022 +0300 refactor: move is_binary to sdk Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 067e046204c16878e31a4f213ae4402866fc2415 Merge: 48d7d68 0c2bafc
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:55:32 2022 +0300 Merge branch 'feature/abstract-cloudevent' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> # Conflicts: # cloudevents/http/http_methods.py # cloudevents/http/json_methods.py commit 0c2bafc423
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:53:52 2022 +0300 refactor: remove optional type Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 48d7d68686f630ee0f1f31283a33900b4174878e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:50:22 2022 +0300 refactor: move all methods to conversion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 81905e73050f0ba89ff5ba4aa6a47257aad7aadb Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:43:46 2022 +0300 refactor: move json methods to conversion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 474bf4368d0e540fee0bdfa632d01c81a16223d1 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:42:25 2022 +0300 refactor: merge conversion logic under conversion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit a8156274a8fc5ebe9af45a0b25bf9f78b10273e6 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:37:28 2022 +0300 feat: init default cloudevent Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 523e1cb331f1131390581389ded2e6de762087e6 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:37:12 2022 +0300 docs: dict conversion functions Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 88c168932b97e3a73d02238e81a2e87328f69469 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:35:20 2022 +0300 refactor: move dict methods to conversion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit b6e008a338b1e4fd5a1d805792a12131a88ce99a Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:30:38 2022 +0300 fix: broken merge Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 2e9e255322064001e04c91fba6d96d89c2da1859 Merge: 316a9fcfbc0632
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:27:27 2022 +0300 Merge branch 'feature/abstract-cloudevent' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> # Conflicts: # cloudevents/abstract/json_methods.py # cloudevents/conversion.py # cloudevents/http/event.py # cloudevents/http/http_methods.py # cloudevents/http/json_methods.py # cloudevents/http/util.py commit fbc063244b
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:26:10 2022 +0300 refactor: use classmethods Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit a8872b9808
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:06:06 2022 +0300 test: remove broken tests Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 065ef91277
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 01:02:17 2022 +0300 refactor: expose data and attributes in class Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit c0b54130c6
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 00:56:01 2022 +0300 refactor: remove mutation variables from contract Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 1109bc5b76
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 00:55:34 2022 +0300 docs: remove inconsistent types Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6a9201647c
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 00:54:22 2022 +0300 refactor: add default value for conversions Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 5d0882d8b9
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 00:50:04 2022 +0300 test: rename badly named test Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 41c5f5984b
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 00:48:37 2022 +0300 refactor: move all abstract conversion logic under conversion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f47087d490
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 25 00:32:16 2022 +0300 Revert "refactor: rename abstract to generic" This reverts commit 89d30eb23d. Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit ea19f7dbd6
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 23:10:53 2022 +0300 test: fix broken test Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit ba16cdd3ac
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 23:10:43 2022 +0300 refactor: cloudevent is no longer absctract Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit d303eaecab
Merge: 89d30eb 61c8657
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 23:06:37 2022 +0300 Merge branch 'main' into feature/abstract-cloudevent Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> # Conflicts: # CHANGELOG.md # cloudevents/http/event.py # cloudevents/tests/test_http_cloudevent.py commit 89d30eb23d
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 23:04:58 2022 +0300 refactor: rename abstract to generic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit a22efbde37
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 23:00:36 2022 +0300 test: add abstract cloudevent coverage tests Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 2b3c0f1292
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 22:04:25 2022 +0300 docs: add missing comment to from_http Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 62595ffc3b
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 22:02:48 2022 +0300 docs: explain why impl has no public attributes property Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit b9e8763594
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 21:59:53 2022 +0300 docs: not implemented errors Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit ecf9418a1b
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 21:56:02 2022 +0300 docs: explain read model Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 1187600b1b
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 21:51:32 2022 +0300 docs: better cloudevent explenation Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit fb4f993536
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 21:50:22 2022 +0300 docs: getitem documentation Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 3845aa7295
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 24 21:48:38 2022 +0300 refactor: use anycloudevent for generics Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 316a9fca85a16f5771cf1cac7723d8711f3ada87 Merge: 8072e61 a96bd6c
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 01:22:39 2022 +0300 Merge branch 'feature/abstract-cloudevent' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit a96bd6cdde
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 01:22:31 2022 +0300 feat: define abstract methods Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 8072e6110cbca2206e72a267f007e1e28f564c3c Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 01:18:30 2022 +0300 docs: wording Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit e6b5c9c66d7774f9b993164e96b98dba1eed07b6 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 01:17:51 2022 +0300 refactor: explicit optional Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit e51926c4d2e05c620f964b4cb5047bd5dec19dd7 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 01:16:27 2022 +0300 refactor: use anystr Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 115c7f5223c4d4346c23786df7b0303a3b30ab4e Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri Jul 22 22:14:15 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 60c00065679ddbd285898ada54a63459c514caa2 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 01:14:02 2022 +0300 test: remove pytest fixture parameterization Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 88f7ae58e7828c5b71b92e3cc3005a8a9ee2632e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 01:08:46 2022 +0300 feat: remove strict event Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 982436c65b72ec46112645ede6fc9cdbe56ea6e4 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 01:08:07 2022 +0300 Revert "fix: strict event did not inherit descriptions" This reverts commit 63975cd67e5bdbc6889327914c1b78d3cd430aa7. Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> # Conflicts: # cloudevents/pydantic/event.py # cloudevents/pydantic/strict_event.py commit f569c541cf3f4d1850f5841504a90c087283766a Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri Jul 22 21:59:25 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 43ecfeea816b2a98b6d2087e6c7d327817baed11 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 00:58:05 2022 +0300 refactor: remove uneeded code Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 154f7674533fa32f1789ed157353cc5d4ee1bceb Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 00:43:43 2022 +0300 refactor: integrate abstract event Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 896299b66df63791258a4dc5594c30843ec76dae Merge: d03467709062e3
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 00:40:46 2022 +0300 Merge branch 'feature/abstract-cloudevent' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 09062e35ff
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 00:40:40 2022 +0300 fix: intengrate data read model Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit d034677da266080c49a91cb857d9b660cb508111 Merge: fb5165e 5648968
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 00:39:03 2022 +0300 Merge branch 'feature/abstract-cloudevent' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 56489682c5
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 00:38:56 2022 +0300 feat: simplify data attributes Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit fb5165eb6c980fa4091dae66871e719e0b2a5aec Merge: af83fb0 01041e7
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 00:28:21 2022 +0300 Merge branch 'feature/abstract-cloudevent' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> # Conflicts: # CHANGELOG.md # cloudevents/http/event.py # cloudevents/tests/test_http_cloudevent.py commit 01041e7cd5
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 00:23:39 2022 +0300 docs: abstract cloudevent Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6588577ffc
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 23 00:17:07 2022 +0300 refactor: create abstract cloudevent package Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit c747f59a29
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:31:06 2022 +0300 refactor: integrate abstract event Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f1ff00908e
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:58:52 2022 +0300 refactor: move to abstract Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 4488201812
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:40:05 2022 +0300 feat: any cloud event Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 2b6483046a
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:38:49 2022 +0300 feat: create function Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 5f8399fa09
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:31:55 2022 +0300 feat: add missing return type Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 41a9af2874
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:30:57 2022 +0300 feat: abstract event Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit af83fb084cdd882a607982ad6352446804f45252 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:08:55 2022 +0300 fix: use python 3 type hints Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 771d2ab147e1755feb5cc0c2ee36edabb076e5e1 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:07:44 2022 +0300 test: explicit value names Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 899e81b670719a45bfc3fa2ff673da4ce90a46a5 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:04:53 2022 +0300 fix: make specversion comperable to strings Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 476d8226cf1b1ca6c6bd9e12cb9b380084f259ae Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:02:29 2022 +0300 docs: make return value more precise Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 9896252a7b999d199c58d788fbc6e4bedb3aac53 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 23:00:00 2022 +0300 refactor: merge attributes to signle module Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 4be431f47fb3a06febe1bf73807a4ff754d722f7 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 22:53:30 2022 +0300 build: explicit pydantic version Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit e44e99687d03b717de0a9fe3abe43d4bdbf02c6f Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 22:46:51 2022 +0300 feat: remove content type from strict event Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit eeb608cbfdbb23740cc90c701d9d4d3c20b8d5e4 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Fri Jul 22 22:46:22 2022 +0300 build: move pydantic tox deps to test.txt Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 63975cd67e5bdbc6889327914c1b78d3cd430aa7 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 02:40:09 2022 +0300 fix: strict event did not inherit descriptions Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 53ab87b817ce995894ce5b41cb6b775491e87105 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon Jul 18 23:20:43 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 50a7fb506eecaba04434519eac49cfd5927d0929 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 02:20:31 2022 +0300 stlye: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit a7af0363228bab5309258ec720fda6bf21fe0ddf Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 02:19:39 2022 +0300 test: strict cloudevent Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit bdfb997e7fa5a5e00ba442fc2d3251c8c05aebf5 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 02:14:47 2022 +0300 test: pydantic json methods Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 1f580ecefbaf529a00da7a60820fab7e63de5da1 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 02:14:29 2022 +0300 fix: use correct import Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 030e7c0daa74592dfe32689c85c2f9fa8171f6b9 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 02:11:09 2022 +0300 test: pydantic 
events integration Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 92cb622bfe2f6230c9184fed05843cfda544bcc2 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 02:06:48 2022 +0300 fix: encode attribute access Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 9d334563c2febdeda2776a7f02e8ed8278b1e96d Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 02:05:45 2022 +0300 feat: make encode attribute value public Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 100c78905ecf96c9afc01702f524426f77d882ff Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:57:38 2022 +0300 feat: strict event Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 703fe1a78f5bb024d2b0d9e6cdc099e42c493d00 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:57:34 2022 +0300 feat: lax event requirments Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f2c9bc4af56b243e62949a99bbe890f069833fcc Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:50:48 2022 +0300 feat: add more proxy imports Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit e8163a9bc1e1a3cff3b03ff20cb41a868c8d283e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:48:25 2022 +0300 test: data not in dummy event Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit c3c9c7d1d3bfa56750da99f79a1c18d5d1efc105 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:46:55 2022 +0300 test: fix broken dummy values Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit bac4f19e6289137da53618476005985c4276cefe Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon Jul 18 22:42:35 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 5f7c7b0b9620fbc841856fb43bfff4de7ca9ac95 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:37:28 2022 +0300 test: repr Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 593fa84428c5f0238cbce22461b85ea4eb62a664 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:35:36 2022 +0300 test: event length Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 0259e46aa4df676c015cf666bae7e5577c8be803 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:35:21 2022 +0300 fix: incorrect iteration Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit bafcec8c2923e3f02a1138578dd04cb35673a36a Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:30:56 2022 +0300 Revert "refactor: better iter type signature" This reverts commit 8bb3e76bf15d925ee5b5ac80e045d320f0bfbaa3. 
Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 8bec7b3dd014e0849a128c3ef5865f9b11bc94d5 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:28:53 2022 +0300 test: item access Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 8bb3e76bf15d925ee5b5ac80e045d320f0bfbaa3 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:23:14 2022 +0300 refactor: better iter type signature Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 37fdeec23bf136e771dc30195564a4bc77860a2f Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:15:27 2022 +0300 docs: cloudevent methods Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit e0ad1ae47261e7276f086fb06aa1730b055d72d4 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:09:37 2022 +0300 docs: fix typo Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 0095236d29e46adef34e1a80a1deb9deeb471557 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:09:23 2022 +0300 docs: fix typo Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 3eb1fe165527fdbc77b33b01ed8090f701022a51 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon Jul 18 22:04:19 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 96d5b66b082b962b35895d48a073567d607d9ed2 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 01:03:13 2022 +0300 test: add xfail on the json content type Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 643ed7692184dc0cebb04ba92350779ffd15c66c Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon Jul 18 21:19:50 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit efb5950b30129b78726bc601fae81c15225fdf97 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 00:18:52 2022 +0300 test: json or string Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 3307e2df6b6b21f6a37c29baa9829246ea4d7d3c Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 00:13:05 2022 +0300 refactor: better type information Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 6f336804dc33e844d54aed1385e3f2db516401da Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 00:10:16 2022 +0300 fix: add optional to signature Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit cb29c54effbf1e0dde28b08d426c67c67c58e705 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:55:25 2022 +0300 fix: add missing decode exception Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 412d1912c133b52851061646c9cf765b63c1c0e1 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Tue Jul 19 00:02:14 2022 +0300 fix: return str not bytes Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 00cc4e3ed232354a518887eeb2e998a7b021acbf Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:56:58 2022 +0300 fix: use correct iteration Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit c7693a1066a7bed4939d7f9fd23f80054d1f630e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:47:26 2022 +0300 fix: normalize datetime Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 0adbc5e08d752a8ec0a1c72e9d3f9b5e95f2092f Author: Alexander Tkachev 
<sasha64sasha@gmail.com> Date: Mon Jul 18 23:45:54 2022 +0300 refactor: simplify ce json Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 61025385ec677d61790716a4040094c83104d382 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:44:08 2022 +0300 refactor: simplify http adapter Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f3f22f175821560b3fc5681120e61e1e1d0a30e4 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:42:58 2022 +0300 feat: dict methods Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 08ab2ce7a61023069c6cbdc2f66d20c033e693c4 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:31:44 2022 +0300 feat: add type information for init Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 914bbcc18c296fcdf924b11442c21d8208f579d4 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:29:56 2022 +0300 fix: normalize enums Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit aeddc2e120a82a83dbb9adbad72614a9bc00b9b8 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:22:34 2022 +0300 fix: remove *args Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 50f985d36f822295cb8c73e8a9eb0e5f5b93fe22 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:21:55 2022 +0300 refactor: move json format methods to event module to prevent confusion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 73c0ada30fc7b037aca1fafd54bf4f7908e9ccd2 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:12:11 2022 +0300 feat: http methods Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 016a3d63a65f7e7f25121401bd2a875daf005fb6 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:11:59 2022 +0300 docs: license Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 388b27837adc3cba781a3accdd546ef5350d404b Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:06:32 2022 +0300 refactor: json methods to use http json methods Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 41a653937db75f6044e0e358c4228fea8561f6ee Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 23:05:48 2022 +0300 style: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 03fcc8df2661c8d9969b701b7affbc13e5e175f3 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 22:57:49 2022 +0300 feat: simplify json functions Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit cb88107c9c2bbd81e0ab5c372b5777faddf2eb4e Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 22:57:36 2022 +0300 feat: from http event Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit cabcf2a02fb1d7debb635818a8bf74207078a94f Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Mon Jul 18 22:50:24 2022 +0300 feat: http adapter Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 09fd02f727cd639ca6d5c7f3b0c579fe627ea5c5 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 22:01:57 2022 +0300 test: fix tests to adjust to specversion changes Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit c3c6f63a15d549aa24449b96248d957afa7a9c81 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:59:05 2022 +0300 fix: imports Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit d0253111eda0425df2779ad61777f5093c9c3437 Author: Alexander 
Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:56:26 2022 +0300 feat: spec version enum Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit dcd3871f502fe69293407ad97eb2ec5946334819 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:50:57 2022 +0300 refactor: split defaults module to attribute modules Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit fc0d718bcac9ec155a8d290fbfae21a4bd04bb82 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:45:44 2022 +0300 fix: every absolute uri is a uri reference Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 82e3439b8efb8a478d10f7425062a02f1bef7d07 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:44:42 2022 +0300 docs: explain why cannot use pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit fbdf8fd5c48449bb6fead21ad1dfd7ec5f335a8a Merge: eb32f0a 3bcf126 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:38:16 2022 +0300 Merge remote-tracking branch 'origin/feature/pydantic' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit eb32f0a910e8baded4549af6e07cf21538938470 Merge: 81935fc0a95e63
Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:38:03 2022 +0300 Merge remote-tracking branch 'upstream/main' into feature/pydantic Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 3bcf126a46857a27d46aefba2d456d853a18cde8 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun Jul 17 18:36:12 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci commit 81935fcdf760222483f23728ce83be388974a623 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:30:14 2022 +0300 test: remove unused import Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 5452151b330d463f4eaf6d91ffc77e6c9d031db7 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun Jul 17 18:16:39 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit a849f536283836d2b66aa951b9fefce18999415a Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:12:28 2022 +0300 build: add missing pydantic dep Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit ce2526522b2e8f84e82e326ab744858179bf93eb Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sun Jul 17 21:09:10 2022 +0300 style: formatting Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 9870c3c90a6f978d2137374aafb3b477ad9e2378 Author: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> Date: Fri Jul 15 11:22:29 2022 +0300 ci: migrate to `main` branch (#180) * ci: migrate to `main` branch Signed-off-by: Yurii Serhiichuk <savik.ne@gmail.com> * docs: mentioned default branch change in the changelog Signed-off-by: Yurii Serhiichuk <savik.ne@gmail.com> Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit be213912bcb8f5d308a8748442f7990d479672db Author: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> Date: Thu Jul 14 12:11:16 2022 +0300 release: v1.4.0 (#179) Signed-off-by: Yurii Serhiichuk <savik.ne@gmail.com> Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 84b488ac8a50131dd82c618cee6869d7be231366 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Thu Jul 14 00:10:08 2022 +0300 fix __eq__ operator raises attribute error on non-cloudevent values (#172) * fix: non-cloudevents values must not equal to cloudevents values (#171) Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * test: refactor move fixtures to beginning Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * test: cloudevent equality bug regression (#171) Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * style: remove redundent else Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * test: remove redundent test Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * test: refactor non_cloudevent_value into a parameterization Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * docs: update changelog Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * docs: fix bad merge Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci Co-authored-by: pre-commit-ci[bot] 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 396c011a24964398e7d885bd13b441bb75b3a8e2 Author: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> Date: Mon Jul 11 20:05:45 2022 +0300 chore: drop `docs` and related files (#168) * chore: drop `docs` and related files Signed-off-by: Yurii Serhiichuk <savik.ne@gmail.com> * docs: update changelog Signed-off-by: Yurii Serhiichuk <savik.ne@gmail.com> Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit faff6dca07eec7f4e7bfbf5b5308c440e8424f65 Author: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat Jul 16 12:24:07 2022 +0000 [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 9d8b6df94fa4ccbf70d060d9531a3830a101a196 Author: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> Date: Fri Jul 15 11:22:29 2022 +0300 ci: migrate to `main` branch (#180) * ci: migrate to `main` branch Signed-off-by: Yurii Serhiichuk <savik.ne@gmail.com> * docs: mentioned default branch change in the changelog Signed-off-by: Yurii Serhiichuk <savik.ne@gmail.com> Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit fa540c714781f641615282a57cca369d89f456d9 Author: Yurii Serhiichuk <xSAVIKx@users.noreply.github.com> Date: Thu Jul 14 12:11:16 2022 +0300 release: v1.4.0 (#179) Signed-off-by: Yurii Serhiichuk <savik.ne@gmail.com> commit 573098232524d9dbb627615cdd0cdd42834dbed0 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 16 15:16:38 2022 +0300 style: sort imports Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 14fdbfcc760ea6a0c2e00c8760eecc4132942685 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 16 15:14:34 2022 +0300 feat: add more examples Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 49bd752b1efac4ba25826beb1ff3e09642f40352 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 16 15:04:51 2022 +0300 test: binary data deserialization Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit c5a8b8668029a68dbe3e6d27b2f876da2ee566c0 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 16 15:02:30 2022 +0300 fix: raise correct exception type to prevent confusion Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 0e075ae22531c042d89874c56e8d5076f81d8894 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 16 14:57:42 2022 +0300 test: binary data serialization Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit b325caeec49fcb1d2cd0e125881bec49e137e0a7 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 16 14:57:23 2022 +0300 fix: forbid api mixing Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit f07169dff83dd9d830cf9f927d0c922a8c5aaefa Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 16 14:47:06 2022 +0300 test: json content type serialization Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 525dee0ddeb2bf035e13383e29994e3ef785e761 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 16 14:39:16 2022 +0300 fix: incorrect behaviour for mirroring Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> commit 29a48598877562a5f8ad392bea51ceb4c4815343 Author: Alexander Tkachev <sasha64sasha@gmail.com> Date: Sat Jul 16 14:33:37 2022 +0300 test: pydantic cloudevent Signed-off-by: Alexander Tkachev <sasha64sasha@gmai… * docs: 
include pydantic feature to changelog Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * docs: add deprecations to changelog Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci Signed-off-by: Alexander Tkachev <sasha64sasha@gmail.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
parent 47818a980d
commit f5bb285d96

CHANGELOG.md (17 changes)
@@ -6,6 +6,22 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

### Added

- A new `CloudEvent` optional `pydantic` model class is available in the
  `cloudevents.pydantic.event` module. The new model enables the integration of
  CloudEvents in your existing pydantic models or integration with pydantic
  dependent systems such as FastAPI. ([#182])

### Changed

- Deprecated `cloudevents.http.event_type` module,
  moved under `cloudevents.sdk.converters`.
- Deprecated `cloudevents.http.json_methods` module,
  moved under `cloudevents.http.conversion`.
- Deprecated `cloudevents.http.http_methods` module,
  moved under `cloudevents.http.conversion`.
- Deprecated `cloudevents.http.util` module.

## [1.5.0] — 2022-08-06

### Added

@@ -180,5 +196,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

[#172]: https://github.com/cloudevents/sdk-python/pull/172
[#173]: https://github.com/cloudevents/sdk-python/pull/173
[#180]: https://github.com/cloudevents/sdk-python/pull/180
[#182]: https://github.com/cloudevents/sdk-python/pull/182
[#184]: https://github.com/cloudevents/sdk-python/pull/184
[#186]: https://github.com/cloudevents/sdk-python/pull/186
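As a quick, illustrative sketch of the new optional model (the attribute values below are made up for the example; assumes the `cloudevents[pydantic]` extra is installed):

from cloudevents.pydantic import CloudEvent

# Hypothetical attribute values, chosen only for illustration.
event = CloudEvent(
    attributes={
        "type": "com.example.something_happened",
        "source": "https://example.com/event-producer",
    },
    data={"key": "value"},
)

# Because the model is a pydantic BaseModel, the usual pydantic serialization
# entry points work; the custom json hooks keep the output a valid CloudEvents
# JSON representation.
print(event.json())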
@ -23,15 +23,44 @@
|
|||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import datetime
|
||||
import enum
|
||||
import json
|
||||
import typing
|
||||
|
||||
from cloudevents import exceptions as cloud_exceptions
|
||||
from cloudevents.abstract import AnyCloudEvent
|
||||
from cloudevents.http import is_binary
|
||||
from cloudevents.http.mappings import _marshaller_by_format, _obj_by_version
|
||||
from cloudevents.http.util import _json_or_string
|
||||
from cloudevents.sdk import converters, marshaller, types
|
||||
from cloudevents.sdk.converters import is_binary
|
||||
from cloudevents.sdk.event import v1, v03
|
||||
|
||||
|
||||
def _best_effort_serialize_to_json(
|
||||
value: typing.Any, *args, **kwargs
|
||||
) -> typing.Optional[typing.Union[bytes, str, typing.Any]]:
|
||||
"""
|
||||
Serializes the given value into a JSON-encoded string.
|
||||
|
||||
Given a None value returns None as is.
|
||||
Given a non-JSON-serializable value, returns the value as is.
|
||||
|
||||
:param value: The value to be serialized into a JSON string.
|
||||
:return: JSON string of the given value OR None OR given value.
|
||||
"""
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return json.dumps(value, *args, **kwargs)
|
||||
except TypeError:
|
||||
return value
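To make the best-effort contract concrete, a small illustration (not part of the change itself):

from cloudevents.conversion import _best_effort_serialize_to_json

# JSON-serializable values come back as JSON strings, None stays None, and
# anything json.dumps cannot handle is returned unchanged.
assert _best_effort_serialize_to_json({"a": 1}) == '{"a": 1}'
assert _best_effort_serialize_to_json(None) is None
raw = b"\x00\x01"  # bytes are not JSON-serializable
assert _best_effort_serialize_to_json(raw) is raw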
|
||||
|
||||
|
||||
_default_marshaller_by_format = {
|
||||
converters.TypeStructured: lambda x: x,
|
||||
converters.TypeBinary: _best_effort_serialize_to_json,
|
||||
} # type: typing.Dict[str, types.MarshallerType]
|
||||
|
||||
_obj_by_version = {"1.0": v1.Event, "0.3": v03.Event}
|
||||
|
||||
|
||||
def to_json(
|
||||
|
@ -169,7 +198,7 @@ def _to_http(
|
|||
:returns: (http_headers: dict, http_body: bytes or str)
|
||||
"""
|
||||
if data_marshaller is None:
|
||||
data_marshaller = _marshaller_by_format[format]
|
||||
data_marshaller = _default_marshaller_by_format[format]
|
||||
|
||||
if event["specversion"] not in _obj_by_version:
|
||||
raise cloud_exceptions.InvalidRequiredFields(
|
||||
|
@ -222,3 +251,76 @@ def to_binary(
|
|||
format=converters.TypeBinary,
|
||||
data_marshaller=data_marshaller,
|
||||
)
|
||||
|
||||
|
||||
def best_effort_encode_attribute_value(value: typing.Any) -> typing.Any:
|
||||
"""
|
||||
SHOULD convert any value into a JSON-serialization-friendly format.
|
||||
|
||||
This function acts in a best-effort manner and MAY not actually encode the value
|
||||
if it does not know how to do that, or the value is already JSON-friendly.
|
||||
|
||||
:param value: Value which MAY or MAY NOT be JSON serializable.
|
||||
:return: Possibly encoded value.
|
||||
"""
|
||||
if isinstance(value, enum.Enum):
|
||||
return value.value
|
||||
if isinstance(value, datetime.datetime):
|
||||
return value.isoformat()
|
||||
|
||||
return value
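A short illustration of the encoding rules above (the values are made up for the example):

import datetime

from cloudevents.conversion import best_effort_encode_attribute_value
from cloudevents.sdk.event.attribute import SpecVersion

# Enums collapse to their value, datetimes to ISO 8601 strings, everything
# else passes through untouched.
assert best_effort_encode_attribute_value(SpecVersion.v1_0) == "1.0"
ts = datetime.datetime(2022, 7, 16, 12, 0, tzinfo=datetime.timezone.utc)
assert best_effort_encode_attribute_value(ts) == "2022-07-16T12:00:00+00:00"
assert best_effort_encode_attribute_value("plain-string") == "plain-string"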
|
||||
|
||||
|
||||
def from_dict(
|
||||
event_type: typing.Type[AnyCloudEvent],
|
||||
event: typing.Dict[str, typing.Any],
|
||||
) -> AnyCloudEvent:
|
||||
"""
|
||||
Constructs an Event object of a given `event_type` from
|
||||
a dict `event` representation.
|
||||
|
||||
:param event: The event represented as a dict.
|
||||
:param event_type: The type of the event to be constructed from the dict.
|
||||
:returns: The event of the specified type backed by the given dict.
|
||||
"""
|
||||
attributes = {
|
||||
attr_name: best_effort_encode_attribute_value(attr_value)
|
||||
for attr_name, attr_value in event.items()
|
||||
if attr_name != "data"
|
||||
}
|
||||
return event_type.create(attributes=attributes, data=event.get("data"))
|
||||
|
||||
|
||||
def to_dict(event: AnyCloudEvent) -> typing.Dict[str, typing.Any]:
|
||||
"""
|
||||
Converts given `event` to its canonical dictionary representation.
|
||||
|
||||
:param event: The event to be converted into a dict.
|
||||
:returns: The canonical dict representation of the event.
|
||||
"""
|
||||
result = {attribute_name: event.get(attribute_name) for attribute_name in event}
|
||||
result["data"] = event.data
|
||||
return result
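A minimal round-trip sketch of the two helpers above (attribute values are illustrative only):

from cloudevents.conversion import from_dict, to_dict
from cloudevents.http import CloudEvent

# Build an event from a dict and convert it back to its canonical dict form.
event = from_dict(
    CloudEvent,
    {"type": "dummy.type", "source": "dummy:source", "data": b"payload"},
)
canonical = to_dict(event)
assert canonical["type"] == "dummy.type"
assert canonical["data"] == b"payload"  # data is carried over as-is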
|
||||
|
||||
|
||||
def _json_or_string(
|
||||
content: typing.Optional[typing.AnyStr],
|
||||
) -> typing.Optional[
|
||||
typing.Union[
|
||||
typing.Dict[typing.Any, typing.Any],
|
||||
typing.List[typing.Any],
|
||||
typing.AnyStr,
|
||||
]
|
||||
]:
|
||||
"""
|
||||
Returns a JSON-decoded dictionary or a list of dictionaries if
|
||||
a valid JSON string is provided.
|
||||
|
||||
Returns the same `content` in case of an error or `None` when no content is provided.
|
||||
"""
|
||||
if content is None:
|
||||
return None
|
||||
try:
|
||||
return json.loads(content)
|
||||
except (json.JSONDecodeError, TypeError, UnicodeDecodeError):
|
||||
return content
|
||||
|
|
|
@@ -39,3 +39,16 @@ class DataMarshallerError(GenericException):

class DataUnmarshallerError(GenericException):
    pass


class IncompatibleArgumentsError(GenericException):
    """
    Raised when a user tries to call a function with arguments which are
    incompatible with each other.
    """


class PydanticFeatureNotInstalled(GenericException):
    """
    Raised when a user tries to use the pydantic feature but did not install it.
    """
|
|
|
@ -12,13 +12,18 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from cloudevents.http.event import CloudEvent # noqa
|
||||
from cloudevents.http.event_type import is_binary, is_structured # noqa
|
||||
from cloudevents.http.http_methods import ( # noqa
|
||||
|
||||
from cloudevents.http.conversion import ( # noqa
|
||||
from_dict,
|
||||
from_http,
|
||||
from_json,
|
||||
to_binary,
|
||||
to_binary_http,
|
||||
to_dict,
|
||||
to_json,
|
||||
to_structured,
|
||||
to_structured_http,
|
||||
)
|
||||
from cloudevents.http.json_methods import from_json, to_json # noqa
|
||||
from cloudevents.http.event import CloudEvent # noqa
|
||||
from cloudevents.http.http_methods import to_binary_http # deprecated # noqa
|
||||
from cloudevents.http.http_methods import to_structured_http # deprecated # noqa
|
||||
from cloudevents.sdk.converters.binary import is_binary # noqa
|
||||
from cloudevents.sdk.converters.structured import is_structured # noqa
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
import typing
|
||||
|
||||
from cloudevents.conversion import from_dict as _abstract_from_dict
|
||||
from cloudevents.conversion import from_http as _abstract_from_http
|
||||
from cloudevents.conversion import from_json as _abstract_from_json
|
||||
from cloudevents.conversion import to_binary, to_dict, to_json, to_structured # noqa
|
||||
from cloudevents.http.event import CloudEvent
|
||||
from cloudevents.sdk import types
|
||||
|
||||
|
||||
def from_json(
|
||||
data: typing.Union[str, bytes],
|
||||
data_unmarshaller: types.UnmarshallerType = None,
|
||||
) -> CloudEvent:
|
||||
"""
|
||||
Parses JSON string `data` into a CloudEvent.
|
||||
|
||||
:param data: JSON string representation of a CloudEvent.
|
||||
:param data_unmarshaller: Callable function that casts `data` to a
|
||||
Python object.
|
||||
:returns: A CloudEvent parsed from the given JSON representation.
|
||||
"""
|
||||
return _abstract_from_json(CloudEvent, data, data_unmarshaller)
|
||||
|
||||
|
||||
def from_http(
|
||||
headers: typing.Dict[str, str],
|
||||
data: typing.Union[str, bytes, None],
|
||||
data_unmarshaller: types.UnmarshallerType = None,
|
||||
) -> CloudEvent:
|
||||
"""
|
||||
Parses CloudEvent `data` and `headers` into a CloudEvent.
|
||||
|
||||
The method supports both binary and structured representations.
|
||||
|
||||
:param headers: The HTTP request headers.
|
||||
:param data: The HTTP request body. If set to None, "" or b'', the returned
|
||||
event's `data` field will be set to None.
|
||||
:param data_unmarshaller: Callable function to map data to a python object
|
||||
e.g. lambda x: x or lambda x: json.loads(x)
|
||||
:returns: A CloudEvent instance parsed from the passed HTTP parameters of
|
||||
the specified type.
|
||||
"""
|
||||
return _abstract_from_http(CloudEvent, headers, data, data_unmarshaller)
|
||||
|
||||
|
||||
def from_dict(
|
||||
event: typing.Dict[str, typing.Any],
|
||||
) -> CloudEvent:
|
||||
"""
|
||||
Constructs a CloudEvent from a dict `event` representation.
|
||||
|
||||
:param event: The event represented as a dict.
|
||||
:returns: The event of the specified type backed by the given dict.
|
||||
"""
|
||||
return _abstract_from_dict(CloudEvent, event)
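A usage sketch of the relocated helpers (header and payload values are made up):

import json

from cloudevents.http import from_http, to_binary

headers = {
    "ce-id": "A234-1234-1234",
    "ce-source": "https://github.com/cloudevents/spec/pull",
    "ce-type": "com.github.pull_request.opened",
    "ce-specversion": "1.0",
    "Content-Type": "application/json",
}
# A binary-mode request body plus ce-* headers become an http CloudEvent ...
event = from_http(headers, json.dumps({"opened": True}))
assert event["type"] == "com.github.pull_request.opened"
# ... and can be turned back into (headers, body) for an outgoing request.
new_headers, body = to_binary(event)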
|
|
@ -18,7 +18,12 @@ import uuid
|
|||
|
||||
import cloudevents.exceptions as cloud_exceptions
|
||||
from cloudevents import abstract
|
||||
from cloudevents.http.mappings import _required_by_version
|
||||
from cloudevents.sdk.event import v1, v03
|
||||
|
||||
_required_by_version = {
|
||||
"1.0": v1.Event._ce_required_fields,
|
||||
"0.3": v03.Event._ce_required_fields,
|
||||
}
|
||||
|
||||
|
||||
class CloudEvent(abstract.CloudEvent):
|
||||
|
@ -41,11 +46,11 @@ class CloudEvent(abstract.CloudEvent):
|
|||
attributes 'specversion', 'id' or 'time', this will create
|
||||
those attributes with default values.
|
||||
e.g. {
|
||||
"content-type": "application/cloudevents+json",
|
||||
"id": "16fb5f0b-211e-1102-3dfe-ea6e2806f124",
|
||||
"source": "<event-source>",
|
||||
"type": "cloudevent.event.type",
|
||||
"specversion": "0.2"
|
||||
"specversion": "1.0",
|
||||
"type": "com.github.pull_request.opened",
|
||||
"source": "https://github.com/cloudevents/spec/pull",
|
||||
"id": "A234-1234-1234",
|
||||
"time": "2018-04-05T17:31:00Z",
|
||||
}
|
||||
:type attributes: typing.Dict[str, str]
|
||||
:param data: The payload of the event, as a python object
|
||||
|
|
|
@ -11,33 +11,27 @@
|
|||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import typing
|
||||
|
||||
from cloudevents.sdk.converters import binary, structured
|
||||
from deprecation import deprecated
|
||||
|
||||
from cloudevents.sdk.converters import is_binary as _moved_is_binary
|
||||
from cloudevents.sdk.converters import is_structured as _moved_is_structured
|
||||
|
||||
# THIS MODULE IS DEPRECATED, YOU SHOULD NOT ADD NEW FUNCTIONALITY HERE
|
||||
|
||||
|
||||
@deprecated(
|
||||
deprecated_in="1.6.0",
|
||||
details="Use cloudevents.sdk.converters.is_binary function instead",
|
||||
)
|
||||
def is_binary(headers: typing.Dict[str, str]) -> bool:
|
||||
"""Uses internal marshallers to determine whether this event is binary
|
||||
:param headers: the HTTP headers
|
||||
:type headers: typing.Dict[str, str]
|
||||
:returns bool: returns a bool indicating whether the headers indicate
|
||||
a binary event type
|
||||
"""
|
||||
headers = {key.lower(): value for key, value in headers.items()}
|
||||
content_type = headers.get("content-type", "")
|
||||
binary_parser = binary.BinaryHTTPCloudEventConverter()
|
||||
return binary_parser.can_read(content_type=content_type, headers=headers)
|
||||
return _moved_is_binary(headers)
|
||||
|
||||
|
||||
@deprecated(
|
||||
deprecated_in="1.6.0",
|
||||
details="Use cloudevents.sdk.converters.is_structured function instead",
|
||||
)
|
||||
def is_structured(headers: typing.Dict[str, str]) -> bool:
|
||||
"""Uses internal marshallers to determine whether this event is structured
|
||||
:param headers: the HTTP headers
|
||||
:type headers: typing.Dict[str, str]
|
||||
:returns bool: returns a bool indicating whether the headers indicate
|
||||
a structured event type
|
||||
"""
|
||||
headers = {key.lower(): value for key, value in headers.items()}
|
||||
content_type = headers.get("content-type", "")
|
||||
structured_parser = structured.JSONHTTPCloudEventConverter()
|
||||
return structured_parser.can_read(content_type=content_type, headers=headers)
|
||||
return _moved_is_structured(headers)
|
||||
|
|
|
@ -16,46 +16,58 @@ import typing
|
|||
|
||||
from deprecation import deprecated
|
||||
|
||||
from cloudevents.conversion import from_http as _abstract_from_http
|
||||
from cloudevents.conversion import to_binary, to_structured
|
||||
from cloudevents.abstract import AnyCloudEvent
|
||||
from cloudevents.http.conversion import from_http as _moved_from_http
|
||||
from cloudevents.http.conversion import to_binary as _moved_to_binary
|
||||
from cloudevents.http.conversion import to_structured as _moved_to_structured
|
||||
from cloudevents.http.event import CloudEvent
|
||||
from cloudevents.sdk import types
|
||||
|
||||
# THIS MODULE IS DEPRECATED, YOU SHOULD NOT ADD NEW FUNCTIONALITY HERE
|
||||
|
||||
|
||||
@deprecated(
|
||||
deprecated_in="1.6.0",
|
||||
details="Use cloudevents.http.to_binary function instead",
|
||||
)
|
||||
def to_binary(
|
||||
event: AnyCloudEvent, data_marshaller: types.MarshallerType = None
|
||||
) -> typing.Tuple[dict, typing.Union[bytes, str]]:
|
||||
return _moved_to_binary(event, data_marshaller)
|
||||
|
||||
|
||||
@deprecated(
|
||||
deprecated_in="1.6.0",
|
||||
details="Use cloudevents.http.to_structured function instead",
|
||||
)
|
||||
def to_structured(
|
||||
event: AnyCloudEvent,
|
||||
data_marshaller: types.MarshallerType = None,
|
||||
) -> typing.Tuple[dict, typing.Union[bytes, str]]:
|
||||
return _moved_to_structured(event, data_marshaller)
|
||||
|
||||
|
||||
@deprecated(
|
||||
deprecated_in="1.6.0",
|
||||
details="Use cloudevents.http.from_http function instead",
|
||||
)
|
||||
def from_http(
|
||||
headers: typing.Dict[str, str],
|
||||
data: typing.Union[str, bytes, None],
|
||||
data_unmarshaller: types.UnmarshallerType = None,
|
||||
) -> CloudEvent:
|
||||
"""
|
||||
Unwrap a CloudEvent (binary or structured) from an HTTP request.
|
||||
:param headers: the HTTP headers
|
||||
:type headers: typing.Dict[str, str]
|
||||
:param data: the HTTP request body. If set to None, "" or b'', the returned
|
||||
event's data field will be set to None
|
||||
:type data: typing.IO
|
||||
:param data_unmarshaller: Callable function to map data to a python object
|
||||
e.g. lambda x: x or lambda x: json.loads(x)
|
||||
:type data_unmarshaller: types.UnmarshallerType
|
||||
"""
|
||||
return _abstract_from_http(CloudEvent, headers, data, data_unmarshaller)
|
||||
|
||||
|
||||
# backwards compatibility
|
||||
to_binary = to_binary
|
||||
# backwards compatibility
|
||||
to_structured = to_structured
|
||||
return _moved_from_http(headers, data, data_unmarshaller)
|
||||
|
||||
|
||||
@deprecated(deprecated_in="1.0.2", details="Use to_binary function instead")
|
||||
def to_binary_http(
|
||||
event: CloudEvent, data_marshaller: types.MarshallerType = None
|
||||
) -> typing.Tuple[dict, typing.Union[bytes, str]]:
|
||||
return to_binary(event, data_marshaller)
|
||||
return _moved_to_binary(event, data_marshaller)
|
||||
|
||||
|
||||
@deprecated(deprecated_in="1.0.2", details="Use to_structured function instead")
|
||||
def to_structured_http(
|
||||
event: CloudEvent, data_marshaller: types.MarshallerType = None
|
||||
) -> typing.Tuple[dict, typing.Union[bytes, str]]:
|
||||
return to_structured(event, data_marshaller)
|
||||
return _moved_to_structured(event, data_marshaller)
|
||||
|
|
|
@ -14,26 +14,34 @@
|
|||
|
||||
import typing
|
||||
|
||||
from cloudevents.conversion import from_json as _abstract_from_json
|
||||
from cloudevents.conversion import to_json
|
||||
from cloudevents.http.event import CloudEvent
|
||||
from deprecation import deprecated
|
||||
|
||||
from cloudevents.abstract import AnyCloudEvent
|
||||
from cloudevents.http import CloudEvent
|
||||
from cloudevents.http.conversion import from_json as _moved_from_json
|
||||
from cloudevents.http.conversion import to_json as _moved_to_json
|
||||
from cloudevents.sdk import types
|
||||
|
||||
# THIS MODULE IS DEPRECATED, YOU SHOULD NOT ADD NEW FUNCTIONALITY HERE
|
||||
|
||||
|
||||
@deprecated(
|
||||
deprecated_in="1.6.0",
|
||||
details="Use cloudevents.http.to_json function instead",
|
||||
)
|
||||
def to_json(
|
||||
event: AnyCloudEvent,
|
||||
data_marshaller: types.MarshallerType = None,
|
||||
) -> typing.Union[str, bytes]:
|
||||
return _moved_to_json(event, data_marshaller)
|
||||
|
||||
|
||||
@deprecated(
|
||||
deprecated_in="1.6.0",
|
||||
details="Use cloudevents.http.from_json function instead",
|
||||
)
|
||||
def from_json(
|
||||
data: typing.Union[str, bytes],
|
||||
data_unmarshaller: types.UnmarshallerType = None,
|
||||
) -> CloudEvent:
|
||||
"""
|
||||
Casts JSON-encoded data into a CloudEvent.
|
||||
:param data: json encoded cloudevent data
|
||||
:param data_unmarshaller: Callable function which will cast data to a
|
||||
python object
|
||||
:type data_unmarshaller: typing.Callable
|
||||
:returns: CloudEvent representing given cloudevent json object
|
||||
"""
|
||||
return _abstract_from_json(CloudEvent, data, data_unmarshaller)
|
||||
|
||||
|
||||
# backwards compatibility
|
||||
to_json = to_json
|
||||
return _moved_from_json(data, data_unmarshaller)
|
||||
|
|
|
@ -11,36 +11,18 @@
|
|||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from deprecation import deprecated
|
||||
|
||||
import json
|
||||
import typing
|
||||
from cloudevents.conversion import ( # noqa
|
||||
_best_effort_serialize_to_json as _moved_default_marshaller,
|
||||
)
|
||||
|
||||
# THIS MODULE IS DEPRECATED, YOU SHOULD NOT ADD NEW FUNCTIONALITY HERE
|
||||
|
||||
|
||||
@deprecated(
|
||||
deprecated_in="1.6.0",
|
||||
details="You SHOULD NOT use the default marshaller",
|
||||
)
|
||||
def default_marshaller(content: any):
|
||||
if content is None:
|
||||
return None
|
||||
try:
|
||||
return json.dumps(content)
|
||||
except TypeError:
|
||||
return content
|
||||
|
||||
|
||||
def _json_or_string(
|
||||
content: typing.Optional[typing.AnyStr],
|
||||
) -> typing.Optional[
|
||||
typing.Union[
|
||||
typing.Dict[typing.Any, typing.Any],
|
||||
typing.List[typing.Any],
|
||||
typing.AnyStr,
|
||||
]
|
||||
]:
|
||||
"""
|
||||
Given an encoded JSON string MUST return decoded JSON object.
|
||||
Otherwise, MUST return the given string as-is.
|
||||
"""
|
||||
if content is None:
|
||||
return None
|
||||
try:
|
||||
return json.loads(content)
|
||||
except (json.JSONDecodeError, TypeError, UnicodeDecodeError):
|
||||
return content
|
||||
return _moved_default_marshaller(content)
|
||||
|
|
|
@ -11,19 +11,13 @@
|
|||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from cloudevents.http.util import default_marshaller
|
||||
from cloudevents.sdk import converters
|
||||
from cloudevents.sdk.event import v1, v03
|
||||
|
||||
_marshaller_by_format = {
|
||||
converters.TypeStructured: lambda x: x,
|
||||
converters.TypeBinary: default_marshaller,
|
||||
}
|
||||
|
||||
_obj_by_version = {"1.0": v1.Event, "0.3": v03.Event}
|
||||
|
||||
_required_by_version = {
|
||||
"1.0": v1.Event._ce_required_fields,
|
||||
"0.3": v03.Event._ce_required_fields,
|
||||
}
|
||||
from cloudevents.pydantic.conversion import ( # noqa
|
||||
from_dict,
|
||||
from_http,
|
||||
from_json,
|
||||
to_binary,
|
||||
to_dict,
|
||||
to_json,
|
||||
to_structured,
|
||||
)
|
||||
from cloudevents.pydantic.event import CloudEvent # noqa
|
|
@ -0,0 +1,75 @@
|
|||
# Copyright 2018-Present The CloudEvents Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import typing
|
||||
|
||||
from cloudevents.conversion import from_dict as _abstract_from_dict
|
||||
from cloudevents.conversion import from_http as _abstract_from_http
|
||||
from cloudevents.conversion import from_json as _abstract_from_json
|
||||
from cloudevents.conversion import to_binary, to_dict, to_json, to_structured # noqa
|
||||
from cloudevents.pydantic.event import CloudEvent
|
||||
from cloudevents.sdk import types
|
||||
|
||||
|
||||
def from_http(
|
||||
headers: typing.Dict[str, str],
|
||||
data: typing.Union[str, bytes, None],
|
||||
data_unmarshaller: typing.Optional[types.UnmarshallerType] = None,
|
||||
) -> CloudEvent:
|
||||
"""
|
||||
Parses CloudEvent `data` and `headers` into a CloudEvent.
|
||||
|
||||
The method supports both binary and structured representations.
|
||||
|
||||
:param headers: The HTTP request headers.
|
||||
:param data: The HTTP request body. If set to None, "" or b'', the returned
|
||||
event's `data` field will be set to None.
|
||||
:param data_unmarshaller: Callable function to map data to a python object
|
||||
e.g. lambda x: x or lambda x: json.loads(x)
|
||||
:returns: A CloudEvent parsed from the passed HTTP parameters
|
||||
"""
|
||||
return _abstract_from_http(
|
||||
headers=headers,
|
||||
data=data,
|
||||
data_unmarshaller=data_unmarshaller,
|
||||
event_type=CloudEvent,
|
||||
)
|
||||
|
||||
|
||||
def from_json(
|
||||
data: typing.AnyStr,
|
||||
data_unmarshaller: types.UnmarshallerType = None,
|
||||
) -> CloudEvent:
|
||||
"""
|
||||
Parses JSON string `data` into a CloudEvent.
|
||||
|
||||
:param data: JSON string representation of a CloudEvent.
|
||||
:param data_unmarshaller: Callable function that casts `data` to a
|
||||
Python object.
|
||||
:returns: A CloudEvent parsed from the given JSON representation.
|
||||
"""
|
||||
return _abstract_from_json(
|
||||
data=data, data_unmarshaller=data_unmarshaller, event_type=CloudEvent
|
||||
)
|
||||
|
||||
|
||||
def from_dict(
|
||||
event: typing.Dict[str, typing.Any],
|
||||
) -> CloudEvent:
|
||||
"""
|
||||
Constructs a CloudEvent from a dict `event` representation.
|
||||
|
||||
:param event: The event represented as a dict.
|
||||
:returns: A CloudEvent parsed from the given dict representation.
|
||||
"""
|
||||
return _abstract_from_dict(CloudEvent, event)
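The pydantic conversion helpers mirror the `cloudevents.http` API but return the pydantic-backed model; a small sketch (header values are made up):

import json

from cloudevents.pydantic import from_http

event = from_http(
    {
        "ce-id": "A234-1234-1234",
        "ce-source": "https://example.com/event-producer",
        "ce-type": "com.example.something_happened",
        "ce-specversion": "1.0",
        "Content-Type": "application/json",
    },
    json.dumps({"key": "value"}),
)
# With an application/json body the default unmarshaller decodes the payload.
assert event.data == {"key": "value"}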
|
|
@ -0,0 +1,304 @@
|
|||
# Copyright 2018-Present The CloudEvents Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import datetime
|
||||
import json
|
||||
import typing
|
||||
|
||||
from cloudevents.exceptions import PydanticFeatureNotInstalled
|
||||
|
||||
try:
|
||||
import pydantic
|
||||
except ImportError: # pragma: no cover # hard to test
|
||||
raise PydanticFeatureNotInstalled(
|
||||
"CloudEvents pydantic feature is not installed. "
|
||||
"Install it using pip install cloudevents[pydantic]"
|
||||
)
|
||||
|
||||
import cloudevents.conversion
|
||||
from cloudevents import abstract, conversion, http
|
||||
from cloudevents.exceptions import IncompatibleArgumentsError
|
||||
from cloudevents.sdk.event import attribute
|
||||
|
||||
|
||||
def _ce_json_dumps(obj: typing.Dict[str, typing.Any], *args, **kwargs) -> str:
|
||||
"""
|
||||
Needed by the pydantic base-model to serialize the event correctly to json.
|
||||
Without this function the data will be incorrectly serialized.
|
||||
:param obj: CloudEvent represented as a dict.
|
||||
:param args: User arguments which will be passed to json.dumps function.
|
||||
:param kwargs: User arguments which will be passed to json.dumps function.
|
||||
:return: Event serialized as a standard JSON CloudEvent with user specific
|
||||
parameters.
|
||||
"""
|
||||
# Using HTTP from dict due to performance issues.
|
||||
# Pydantic is known for initialization time lagging.
|
||||
return json.dumps(
|
||||
# We SHOULD de-serialize the value, to serialize it back with
|
||||
# the correct json args and kwargs passed by the user.
|
||||
# This MAY cause performance issues in the future.
|
||||
# When that issue causes a real problem, you MAY add a special keyword
# argument that disables this conversion.
|
||||
json.loads(
|
||||
conversion.to_json(
|
||||
http.from_dict(obj),
|
||||
).decode("utf-8")
|
||||
),
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
|
||||
def _ce_json_loads(
|
||||
data: typing.Union[str, bytes], *args, **kwargs # noqa
|
||||
) -> typing.Dict[typing.Any, typing.Any]:
|
||||
"""
|
||||
Needed by the pydantic base-model to de-serialize the event correctly from json.
|
||||
Without this function the data will be incorrectly de-serialized.
|
||||
:param data: CloudEvent encoded as a json string.
|
||||
:param args: These arguments SHOULD NOT be passed by pydantic.
|
||||
Located here for fail-safe reasons, in case they are passed anyway.
|
||||
:param kwargs: These arguments SHOULD NOT be passed by pydantic.
|
||||
Located here for fail-safe reasons, in case they are passed anyway.
|
||||
:return: CloudEvent in a dict representation.
|
||||
"""
|
||||
# Using HTTP from dict due to performance issues.
|
||||
# Pydantic is known for initialization time lagging.
|
||||
return cloudevents.conversion.to_dict(http.from_json(data))
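Because the hooks above are wired into the model's `Config`, pydantic's own entry points should round-trip canonical CloudEvents JSON; a sketch, assuming pydantic v1 semantics for `Config.json_loads`/`json_dumps`:

from cloudevents.pydantic import CloudEvent

payload = (
    '{"specversion": "1.0", "type": "com.example.something_happened",'
    ' "source": "https://example.com/event-producer",'
    ' "id": "A234-1234-1234", "data": {"key": "value"}}'
)
# parse_raw goes through _ce_json_loads, .json() through _ce_json_dumps.
event = CloudEvent.parse_raw(payload)
assert event.data == {"key": "value"}
print(event.json())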
|
||||
|
||||
|
||||
class CloudEvent(abstract.CloudEvent, pydantic.BaseModel):
|
||||
"""
|
||||
A Python-friendly CloudEvent representation backed by Pydantic-modeled fields.
|
||||
|
||||
Supports both binary and structured modes of the CloudEvents v1 specification.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls, attributes: typing.Dict[str, typing.Any], data: typing.Optional[typing.Any]
|
||||
) -> "CloudEvent":
|
||||
return cls(attributes, data)
|
||||
|
||||
data: typing.Optional[typing.Any] = pydantic.Field(
|
||||
title="Event Data",
|
||||
description=(
|
||||
"CloudEvents MAY include domain-specific information about the occurrence."
|
||||
" When present, this information will be encapsulated within data.It is"
|
||||
" encoded into a media format which is specified by the datacontenttype"
|
||||
" attribute (e.g. application/json), and adheres to the dataschema format"
|
||||
" when those respective attributes are present."
|
||||
),
|
||||
)
|
||||
source: str = pydantic.Field(
|
||||
title="Event Source",
|
||||
description=(
|
||||
"Identifies the context in which an event happened. Often this will include"
|
||||
" information such as the type of the event source, the organization"
|
||||
" publishing the event or the process that produced the event. The exact"
|
||||
" syntax and semantics behind the data encoded in the URI is defined by the"
|
||||
" event producer.\n"
|
||||
"\n"
|
||||
"Producers MUST ensure that source + id is unique for"
|
||||
" each distinct event.\n"
|
||||
"\n"
|
||||
"An application MAY assign a unique source to each"
|
||||
" distinct producer, which makes it easy to produce unique IDs since no"
|
||||
" other producer will have the same source. The application MAY use UUIDs,"
|
||||
" URNs, DNS authorities or an application-specific scheme to create unique"
|
||||
" source identifiers.\n"
|
||||
"\n"
|
||||
"A source MAY include more than one producer. In"
|
||||
" that case the producers MUST collaborate to ensure that source + id is"
|
||||
" unique for each distinct event."
|
||||
),
|
||||
example="https://github.com/cloudevents",
|
||||
)
|
||||
|
||||
id: str = pydantic.Field(
|
||||
default_factory=attribute.default_id_selection_algorithm,
|
||||
title="Event ID",
|
||||
description=(
|
||||
"Identifies the event. Producers MUST ensure that source + id is unique for"
|
||||
" each distinct event. If a duplicate event is re-sent (e.g. due to a"
|
||||
" network error) it MAY have the same id. Consumers MAY assume that Events"
|
||||
" with identical source and id are duplicates. MUST be unique within the"
|
||||
" scope of the producer"
|
||||
),
|
||||
example="A234-1234-1234",
|
||||
)
|
||||
type: str = pydantic.Field(
|
||||
title="Event Type",
|
||||
description=(
|
||||
"This attribute contains a value describing the type of event related to"
|
||||
" the originating occurrence. Often this attribute is used for routing,"
|
||||
" observability, policy enforcement, etc. The format of this is producer"
|
||||
" defined and might include information such as the version of the type"
|
||||
),
|
||||
example="com.github.pull_request.opened",
|
||||
)
|
||||
specversion: attribute.SpecVersion = pydantic.Field(
|
||||
default=attribute.DEFAULT_SPECVERSION,
|
||||
title="Specification Version",
|
||||
description=(
|
||||
"The version of the CloudEvents specification which the event uses. This"
|
||||
" enables the interpretation of the context.\n"
|
||||
"\n"
|
||||
"Currently, this attribute will only have the 'major'"
|
||||
" and 'minor' version numbers included in it. This allows for 'patch'"
|
||||
" changes to the specification to be made without changing this property's"
|
||||
" value in the serialization."
|
||||
),
|
||||
example=attribute.DEFAULT_SPECVERSION,
|
||||
)
|
||||
time: typing.Optional[datetime.datetime] = pydantic.Field(
|
||||
default_factory=attribute.default_time_selection_algorithm,
|
||||
title="Occurrence Time",
|
||||
description=(
|
||||
" Timestamp of when the occurrence happened. If the time of the occurrence"
|
||||
" cannot be determined then this attribute MAY be set to some other time"
|
||||
" (such as the current time) by the CloudEvents producer, however all"
|
||||
" producers for the same source MUST be consistent in this respect. In"
|
||||
" other words, either they all use the actual time of the occurrence or"
|
||||
" they all use the same algorithm to determine the value used."
|
||||
),
|
||||
example="2018-04-05T17:31:00Z",
|
||||
)
|
||||
|
||||
subject: typing.Optional[str] = pydantic.Field(
|
||||
title="Event Subject",
|
||||
description=(
|
||||
"This describes the subject of the event in the context of the event"
|
||||
" producer (identified by source). In publish-subscribe scenarios, a"
|
||||
" subscriber will typically subscribe to events emitted by a source, but"
|
||||
" the source identifier alone might not be sufficient as a qualifier for"
|
||||
" any specific event if the source context has internal"
|
||||
" sub-structure.\n"
|
||||
"\n"
|
||||
"Identifying the subject of the event in context"
|
||||
" metadata (opposed to only in the data payload) is particularly helpful in"
|
||||
" generic subscription filtering scenarios where middleware is unable to"
|
||||
" interpret the data content. In the above example, the subscriber might"
|
||||
" only be interested in blobs with names ending with '.jpg' or '.jpeg' and"
|
||||
" the subject attribute allows for constructing a simple and efficient"
|
||||
" string-suffix filter for that subset of events."
|
||||
),
|
||||
example="123",
|
||||
)
|
||||
datacontenttype: typing.Optional[str] = pydantic.Field(
|
||||
title="Event Data Content Type",
|
||||
description=(
|
||||
"Content type of data value. This attribute enables data to carry any type"
|
||||
" of content, whereby format and encoding might differ from that of the"
|
||||
" chosen event format."
|
||||
),
|
||||
example="text/xml",
|
||||
)
|
||||
dataschema: typing.Optional[str] = pydantic.Field(
|
||||
title="Event Data Schema",
|
||||
description=(
|
||||
"Identifies the schema that data adheres to. "
|
||||
"Incompatible changes to the schema SHOULD be reflected by a different URI"
|
||||
),
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
attributes: typing.Optional[typing.Dict[str, typing.Any]] = None,
|
||||
data: typing.Optional[typing.Any] = None,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
:param attributes: A dict with CloudEvent attributes.
|
||||
Minimally expects the attributes 'type' and 'source'. If not given the
|
||||
attributes 'specversion', 'id' or 'time', this will create
|
||||
those attributes with default values.
|
||||
|
||||
If no attributes are given, the class MUST use the kwargs as the attributes.
|
||||
|
||||
Example Attributes:
|
||||
{
|
||||
"specversion": "1.0",
|
||||
"type": "com.github.pull_request.opened",
|
||||
"source": "https://github.com/cloudevents/spec/pull",
|
||||
"id": "A234-1234-1234",
|
||||
"time": "2018-04-05T17:31:00Z",
|
||||
}
|
||||
|
||||
:param data: Domain-specific information about the occurrence.
|
||||
"""
|
||||
if attributes:
|
||||
if len(kwargs) != 0:
|
||||
# To prevent API complexity and confusion.
|
||||
raise IncompatibleArgumentsError(
|
||||
"Attributes dict and kwargs are incompatible."
|
||||
)
|
||||
attributes = {k.lower(): v for k, v in attributes.items()}
|
||||
kwargs.update(attributes)
|
||||
super(CloudEvent, self).__init__(data=data, **kwargs)
|
||||
|
||||
class Config:
|
||||
extra: str = "allow" # this is the way we implement extensions
|
||||
schema_extra = {
|
||||
"example": {
|
||||
"specversion": "1.0",
|
||||
"type": "com.github.pull_request.opened",
|
||||
"source": "https://github.com/cloudevents/spec/pull",
|
||||
"subject": "123",
|
||||
"id": "A234-1234-1234",
|
||||
"time": "2018-04-05T17:31:00Z",
|
||||
"comexampleextension1": "value",
|
||||
"comexampleothervalue": 5,
|
||||
"datacontenttype": "text/xml",
|
||||
"data": '<much wow="xml"/>',
|
||||
}
|
||||
}
|
||||
json_dumps = _ce_json_dumps
|
||||
json_loads = _ce_json_loads
|
||||
|
||||
def _get_attributes(self) -> typing.Dict[str, typing.Any]:
|
||||
return {
|
||||
key: conversion.best_effort_encode_attribute_value(value)
|
||||
for key, value in self.__dict__.items()
|
||||
if key != "data"
|
||||
}
|
||||
|
||||
def _get_data(self) -> typing.Optional[typing.Any]:
|
||||
return self.data
|
||||
|
||||
def __setitem__(self, key: str, value: typing.Any) -> None:
|
||||
"""
|
||||
Set event attribute value
|
||||
|
||||
MUST NOT set event data with this method, use `.data` member instead
|
||||
|
||||
Method SHOULD mimic `cloudevents.http.event.CloudEvent` interface
|
||||
|
||||
:param key: Event attribute name
|
||||
:param value: New event attribute value
|
||||
"""
|
||||
if key != "data": # to mirror the behaviour of the http event
|
||||
setattr(self, key, value)
|
||||
else:
|
||||
pass # It is de-facto ignored by the http event
|
||||
|
||||
def __delitem__(self, key: str) -> None:
|
||||
"""
|
||||
SHOULD raise `KeyError` if no event attribute for the given key exists.
|
||||
|
||||
Method SHOULD mimic `cloudevents.http.event.CloudEvent` interface
|
||||
:param key: The event attribute name.
|
||||
"""
|
||||
if key == "data":
|
||||
raise KeyError(key) # to mirror the behaviour of the http event
|
||||
delattr(self, key)
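To show how the class mirrors the existing http event's item access, and how `extra = "allow"` carries extension attributes, a small sketch (attribute values are illustrative):

from cloudevents.pydantic import CloudEvent

event = CloudEvent(
    attributes={
        "type": "com.example.something_happened",
        "source": "https://example.com/event-producer",
        "comexampleextension1": "value",  # extension attribute, kept via extra="allow"
    },
    data=None,
)
assert event["comexampleextension1"] == "value"
event["subject"] = "123"    # attributes can be set item-style
event["data"] = "ignored"   # mirroring the http event, this is silently ignored
assert event.data is None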
|
|
@@ -13,6 +13,8 @@
# under the License.

from cloudevents.sdk.converters import binary, structured
from cloudevents.sdk.converters.binary import is_binary  # noqa
from cloudevents.sdk.converters.structured import is_structured  # noqa

TypeBinary = binary.BinaryHTTPCloudEventConverter.TYPE
TypeStructured = structured.JSONHTTPCloudEventConverter.TYPE
|
|
|
@ -57,3 +57,17 @@ class BinaryHTTPCloudEventConverter(base.Converter):
|
|||
|
||||
def NewBinaryHTTPCloudEventConverter() -> BinaryHTTPCloudEventConverter:
|
||||
return BinaryHTTPCloudEventConverter()
|
||||
|
||||
|
||||
def is_binary(headers: typing.Dict[str, str]) -> bool:
|
||||
"""
|
||||
Determines whether an event with the supplied `headers` is in binary format.
|
||||
|
||||
:param headers: The HTTP headers of a potential event.
|
||||
:returns: Returns a bool indicating whether the headers indicate
|
||||
a binary event type.
|
||||
"""
|
||||
headers = {key.lower(): value for key, value in headers.items()}
|
||||
content_type = headers.get("content-type", "")
|
||||
binary_parser = BinaryHTTPCloudEventConverter()
|
||||
return binary_parser.can_read(content_type=content_type, headers=headers)
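The same check expressed as a tiny usage sketch (header values are made up):

from cloudevents.sdk.converters import is_binary

# A request carrying the ce-* context headers is recognized as binary mode;
# a structured content type without them is not.
assert is_binary(
    {
        "ce-id": "A234-1234-1234",
        "ce-source": "https://example.com/event-producer",
        "ce-type": "com.example.something_happened",
        "ce-specversion": "1.0",
        "Content-Type": "text/plain",
    }
)
assert not is_binary({"Content-Type": "application/cloudevents+json"})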
|
||||
|
|
|
@ -56,3 +56,17 @@ class JSONHTTPCloudEventConverter(base.Converter):
|
|||
|
||||
def NewJSONHTTPCloudEventConverter() -> JSONHTTPCloudEventConverter:
|
||||
return JSONHTTPCloudEventConverter()
|
||||
|
||||
|
||||
def is_structured(headers: typing.Dict[str, str]) -> bool:
|
||||
"""
|
||||
Determines whether an event with the supplied `headers` is in a structured format.
|
||||
|
||||
:param headers: The HTTP headers of a potential event.
|
||||
:returns: Returns a bool indicating whether the headers indicate
|
||||
a structured event type.
|
||||
"""
|
||||
headers = {key.lower(): value for key, value in headers.items()}
|
||||
content_type = headers.get("content-type", "")
|
||||
structured_parser = JSONHTTPCloudEventConverter()
|
||||
return structured_parser.can_read(content_type=content_type, headers=headers)
|
||||
|
|
|
@ -0,0 +1,48 @@
|
|||
# Copyright 2018-Present The CloudEvents Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
import uuid
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class SpecVersion(str, Enum):
|
||||
"""
|
||||
The version of the CloudEvents specification which an event uses.
|
||||
This enables the interpretation of the context.
|
||||
|
||||
Currently, this attribute will only have the 'major' and 'minor' version numbers
|
||||
included in it. This allows for 'patch' changes to the specification to be made
|
||||
without changing this property's value in the serialization.
|
||||
"""
|
||||
|
||||
v0_3 = "0.3"
|
||||
v1_0 = "1.0"
|
||||
|
||||
|
||||
DEFAULT_SPECVERSION = SpecVersion.v1_0
|
||||
|
||||
|
||||
def default_time_selection_algorithm() -> datetime.datetime:
|
||||
"""
|
||||
:return: A time value which will be used as CloudEvent time attribute value.
|
||||
"""
|
||||
return datetime.datetime.now(datetime.timezone.utc)
|
||||
|
||||
|
||||
def default_id_selection_algorithm() -> str:
|
||||
"""
|
||||
:return: Globally unique id to be used as a CloudEvent id attribute value.
|
||||
"""
|
||||
return str(uuid.uuid4())
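A short sketch of how these defaults behave:

from cloudevents.sdk.event.attribute import (
    DEFAULT_SPECVERSION,
    SpecVersion,
    default_id_selection_algorithm,
    default_time_selection_algorithm,
)

# SpecVersion is a str-valued enum, so it compares equal to its string form.
assert DEFAULT_SPECVERSION == SpecVersion.v1_0 == "1.0"
event_id = default_id_selection_algorithm()       # random UUID4 as a string
occurred_at = default_time_selection_algorithm()  # timezone-aware UTC "now"
print(event_id, occurred_at.isoformat())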
|
|
@ -0,0 +1,56 @@
|
|||
# Copyright 2018-Present The CloudEvents Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import pytest
|
||||
|
||||
from cloudevents.conversion import _best_effort_serialize_to_json
|
||||
from cloudevents.http import CloudEvent
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def dummy_event():
|
||||
return CloudEvent({"type": "dummy", "source": "dummy"})
|
||||
|
||||
|
||||
def test_json_methods(dummy_event):
|
||||
from cloudevents.http import from_json, to_json
|
||||
from cloudevents.http.json_methods import from_json as deprecated_from_json
|
||||
from cloudevents.http.json_methods import to_json as deprecated_to_json
|
||||
|
||||
assert from_json(to_json(dummy_event)) == deprecated_from_json(
|
||||
deprecated_to_json(dummy_event)
|
||||
)
|
||||
|
||||
|
||||
def test_http_methods(dummy_event):
|
||||
from cloudevents.http import from_http, to_binary, to_structured
|
||||
from cloudevents.http.http_methods import from_http as deprecated_from_http
|
||||
from cloudevents.http.http_methods import to_binary as deprecated_to_binary
|
||||
from cloudevents.http.http_methods import to_structured as deprecated_to_structured
|
||||
|
||||
assert from_http(*to_binary(dummy_event)) == deprecated_from_http(
|
||||
*deprecated_to_binary(dummy_event)
|
||||
)
|
||||
assert from_http(*to_structured(dummy_event)) == deprecated_from_http(
|
||||
*deprecated_to_structured(dummy_event)
|
||||
)
|
||||
|
||||
|
||||
def test_util():
|
||||
from cloudevents.http.util import default_marshaller # noqa
|
||||
|
||||
assert _best_effort_serialize_to_json(None) == default_marshaller(None)
|
||||
|
||||
|
||||
def test_event_type():
|
||||
from cloudevents.http.event_type import is_binary, is_structured # noqa
|
|
@ -15,8 +15,8 @@
|
|||
import pytest
|
||||
|
||||
import cloudevents.exceptions as cloud_exceptions
|
||||
from cloudevents.conversion import _json_or_string
|
||||
from cloudevents.http import CloudEvent
|
||||
from cloudevents.http.util import _json_or_string
|
||||
|
||||
|
||||
@pytest.fixture(params=["0.3", "1.0"])
|
||||
|
|
|
@ -0,0 +1,158 @@
|
|||
# Copyright 2018-Present The CloudEvents Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import base64
|
||||
import datetime
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
from cloudevents.http import CloudEvent, from_dict, from_json, to_dict, to_json
|
||||
from cloudevents.sdk.event.attribute import SpecVersion
|
||||
|
||||
test_data = json.dumps({"data-key": "val"})
|
||||
test_attributes = {
|
||||
"type": "com.example.string",
|
||||
"source": "https://example.com/event-producer",
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
|
||||
def test_to_json(specversion):
|
||||
event = CloudEvent(test_attributes, test_data)
|
||||
event_json = to_json(event)
|
||||
event_dict = json.loads(event_json)
|
||||
|
||||
for key, val in test_attributes.items():
|
||||
assert event_dict[key] == val
|
||||
|
||||
assert event_dict["data"] == test_data
|
||||
|
||||
|
||||
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
|
||||
def test_to_json_base64(specversion):
|
||||
data = b"test123"
|
||||
|
||||
event = CloudEvent(test_attributes, data)
|
||||
event_json = to_json(event)
|
||||
event_dict = json.loads(event_json)
|
||||
|
||||
for key, val in test_attributes.items():
|
||||
assert event_dict[key] == val
|
||||
|
||||
# test data was properly marshalled into data_base64
|
||||
data_base64 = event_dict["data_base64"].encode()
|
||||
test_data_base64 = base64.b64encode(data)
|
||||
|
||||
assert data_base64 == test_data_base64
|
||||
|
||||
|
||||
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
|
||||
def test_from_json(specversion):
|
||||
payload = {
|
||||
"type": "com.example.string",
|
||||
"source": "https://example.com/event-producer",
|
||||
"id": "1234",
|
||||
"specversion": specversion,
|
||||
"data": {"data-key": "val"},
|
||||
}
|
||||
event = from_json(json.dumps(payload))
|
||||
|
||||
for key, val in payload.items():
|
||||
if key == "data":
|
||||
assert event.data == payload["data"]
|
||||
else:
|
||||
assert event[key] == val
|
||||
|
||||
|
||||
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
|
||||
def test_from_json_base64(specversion):
|
||||
# Create base64 encoded data
|
||||
raw_data = {"data-key": "val"}
|
||||
data = json.dumps(raw_data).encode()
|
||||
data_base64_str = base64.b64encode(data).decode()
|
||||
|
||||
# Create json payload
|
||||
payload = {
|
||||
"type": "com.example.string",
|
||||
"source": "https://example.com/event-producer",
|
||||
"id": "1234",
|
||||
"specversion": specversion,
|
||||
"data_base64": data_base64_str,
|
||||
}
|
||||
payload_json = json.dumps(payload)
|
||||
|
||||
# Create event
|
||||
event = from_json(payload_json)
|
||||
|
||||
# Test fields were marshalled properly
|
||||
for key, val in payload.items():
|
||||
if key == "data_base64":
|
||||
# Check data_base64 was unmarshalled properly
|
||||
assert event.data == raw_data
|
||||
else:
|
||||
assert event[key] == val
|
||||
|
||||
|
||||
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
|
||||
def test_json_can_talk_to_itself(specversion):
|
||||
event = CloudEvent(test_attributes, test_data)
|
||||
event_json = to_json(event)
|
||||
|
||||
event = from_json(event_json)
|
||||
|
||||
for key, val in test_attributes.items():
|
||||
assert event[key] == val
|
||||
assert event.data == test_data
|
||||
|
||||
|
||||
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
|
||||
def test_json_can_talk_to_itself_base64(specversion):
|
||||
data = b"test123"
|
||||
|
||||
event = CloudEvent(test_attributes, data)
|
||||
event_json = to_json(event)
|
||||
|
||||
event = from_json(event_json)
|
||||
|
||||
for key, val in test_attributes.items():
|
||||
assert event[key] == val
|
||||
assert event.data == data
|
||||
|
||||
|
||||
def test_from_dict():
|
||||
given = {
|
||||
"data": b"\x00\x00\x11Hello World",
|
||||
"datacontenttype": "application/octet-stream",
|
||||
"dataschema": None,
|
||||
"id": "11775cb2-fd00-4487-a18b-30c3600eaa5f",
|
||||
"source": "dummy:source",
|
||||
"specversion": SpecVersion.v1_0,
|
||||
"subject": None,
|
||||
"time": datetime.datetime(
|
||||
2022, 7, 16, 12, 3, 20, 519216, tzinfo=datetime.timezone.utc
|
||||
),
|
||||
"type": "dummy.type",
|
||||
}
|
||||
assert to_dict(from_dict(given)) == {
|
||||
"data": b"\x00\x00\x11Hello World",
|
||||
"datacontenttype": "application/octet-stream",
|
||||
"dataschema": None,
|
||||
"id": "11775cb2-fd00-4487-a18b-30c3600eaa5f",
|
||||
"source": "dummy:source",
|
||||
"specversion": "1.0",
|
||||
"subject": None,
|
||||
"time": "2022-07-16T12:03:20.519216+00:00",
|
||||
"type": "dummy.type",
|
||||
}
|
|
@ -20,15 +20,12 @@ import pytest
|
|||
from sanic import Sanic, response
|
||||
|
||||
import cloudevents.exceptions as cloud_exceptions
|
||||
from cloudevents.http import (
|
||||
CloudEvent,
|
||||
from_http,
|
||||
is_binary,
|
||||
is_structured,
|
||||
to_binary,
|
||||
to_structured,
|
||||
)
|
||||
from cloudevents.http import CloudEvent, from_http, to_binary, to_structured
|
||||
from cloudevents.http.event_type import is_binary as deprecated_is_binary
|
||||
from cloudevents.http.event_type import is_structured as deprecated_is_structured
|
||||
from cloudevents.sdk import converters
|
||||
from cloudevents.sdk.converters.binary import is_binary
|
||||
from cloudevents.sdk.converters.structured import is_structured
|
||||
|
||||
invalid_test_headers = [
|
||||
{
|
||||
|
@ -358,23 +355,36 @@ def test_structured_no_content_type(specversion):
|
|||
assert event.data[key] == val
|
||||
|
||||
|
||||
def test_is_binary():
|
||||
headers = {
|
||||
"ce-id": "my-id",
|
||||
"ce-source": "<event-source>",
|
||||
"ce-type": "cloudevent.event.type",
|
||||
"ce-specversion": "1.0",
|
||||
"Content-Type": "text/plain",
|
||||
}
|
||||
assert is_binary(headers)
|
||||
parameterize_binary_func = pytest.mark.parametrize(
|
||||
"is_binary_func", [is_binary, deprecated_is_binary]
|
||||
)
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/cloudevents+json",
|
||||
}
|
||||
assert not is_binary(headers)
|
||||
|
||||
headers = {}
|
||||
assert not is_binary(headers)
|
||||
@parameterize_binary_func
|
||||
def test_empty_headers_must_not_be_recognized_as_binary(is_binary_func):
|
||||
assert not is_binary_func({})
|
||||
|
||||
|
||||
@parameterize_binary_func
|
||||
def test_non_binary_headers_must_not_be_recognized_as_binary(is_binary_func):
|
||||
assert not is_binary_func(
|
||||
{
|
||||
"Content-Type": "application/cloudevents+json",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@parameterize_binary_func
|
||||
def test_binary_ce_headers_must_be_recognize_as_binary(is_binary_func):
|
||||
assert is_binary_func(
|
||||
{
|
||||
"ce-id": "my-id",
|
||||
"ce-source": "<event-source>",
|
||||
"ce-type": "cloudevent.event.type",
|
||||
"ce-specversion": "1.0",
|
||||
"Content-Type": "text/plain",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
|
||||
|
@ -438,11 +448,14 @@ def test_wrong_specversion_to_request():
|
|||
assert "Unsupported specversion: 0.2" in str(e.value)
|
||||
|
||||
|
||||
def test_is_structured():
|
||||
@pytest.mark.parametrize(
|
||||
"is_structured_func", [is_structured, deprecated_is_structured]
|
||||
)
|
||||
def test_is_structured(is_structured_func):
|
||||
headers = {
|
||||
"Content-Type": "application/cloudevents+json",
|
||||
}
|
||||
assert is_structured(headers)
|
||||
assert is_structured_func(headers)
|
||||
|
||||
headers = {
|
||||
"ce-id": "my-id",
|
||||
|
@ -451,7 +464,7 @@ def test_is_structured():
|
|||
"ce-specversion": "1.0",
|
||||
"Content-Type": "text/plain",
|
||||
}
|
||||
assert not is_structured(headers)
|
||||
assert not is_structured_func(headers)
|
||||
|
||||
|
||||
def test_empty_json_structured():
|
||||
|
|
|
@@ -0,0 +1,349 @@
# Copyright 2018-Present The CloudEvents Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from json import loads

import pytest
from pydantic import ValidationError

from cloudevents.conversion import _json_or_string
from cloudevents.exceptions import IncompatibleArgumentsError
from cloudevents.pydantic import CloudEvent
from cloudevents.sdk.event.attribute import SpecVersion

_DUMMY_SOURCE = "dummy:source"
_DUMMY_TYPE = "tests.cloudevents.override"
_DUMMY_TIME = "2022-07-16T11:20:34.284130+00:00"
_DUMMY_ID = "my-id"


@pytest.fixture(params=["0.3", "1.0"])
def specversion(request):
    return request.param
@pytest.fixture()
def dummy_attributes(specversion):
    return {
        "source": _DUMMY_SOURCE,
        "specversion": specversion,
        "id": _DUMMY_ID,
        "time": _DUMMY_TIME,
        "type": _DUMMY_TYPE,
        "datacontenttype": "application/json",
        "subject": "my-subject",
        "dataschema": "myschema:dummy",
    }


@pytest.fixture()
def my_dummy_data():
    return '{"name":"john"}'


@pytest.fixture()
def your_dummy_data():
    return '{"name":"paul"}'


@pytest.fixture()
def dummy_event(dummy_attributes, my_dummy_data):
    return CloudEvent(attributes=dummy_attributes, data=my_dummy_data)


@pytest.fixture()
def non_exiting_attribute_name(dummy_event):
    result = "nonexisting"
    assert result not in dummy_event
    return result
def test_pydantic_cloudevent_equality(dummy_attributes, my_dummy_data, your_dummy_data):
    data = my_dummy_data
    event1 = CloudEvent(dummy_attributes, data)
    event2 = CloudEvent(dummy_attributes, data)
    assert event1 == event2
    # Test different attributes
    for key in dummy_attributes:
        if key in ("specversion", "time", "datacontenttype", "dataschema"):
            continue
        else:
            dummy_attributes[key] = f"noise-{key}"
        event3 = CloudEvent(dummy_attributes, data)
        event2 = CloudEvent(dummy_attributes, data)
        assert event2 == event3
        assert event1 != event2 and event3 != event1

    # Test different data
    data = your_dummy_data
    event3 = CloudEvent(dummy_attributes, data)
    event2 = CloudEvent(dummy_attributes, data)
    assert event2 == event3
    assert event1 != event2 and event3 != event1

@pytest.mark.parametrize(
    "non_cloudevent_value",
    (
        1,
        None,
        object(),
        "Hello World",
    ),
)
def test_http_cloudevent_must_not_equal_to_non_cloudevent_value(
    dummy_event, non_cloudevent_value
):
    assert not dummy_event == non_cloudevent_value


def test_http_cloudevent_mutates_equality(
    dummy_attributes, my_dummy_data, your_dummy_data
):
    data = my_dummy_data
    event1 = CloudEvent(dummy_attributes, data)
    event2 = CloudEvent(dummy_attributes, data)
    event3 = CloudEvent(dummy_attributes, data)

    assert event1 == event2
    # Test different attributes
    for key in dummy_attributes:
        if key in ("specversion", "time", "datacontenttype"):
            continue
        else:
            event2[key] = f"noise-{key}"
            event3[key] = f"noise-{key}"
        assert event2 == event3
        assert event1 != event2 and event3 != event1

    # Test different data
    event2.data = your_dummy_data
    event3.data = your_dummy_data
    assert event2 == event3
    assert event1 != event2 and event3 != event1

def test_cloudevent_missing_specversion():
    attributes = {"specversion": "0.2", "source": "s", "type": "t"}
    with pytest.raises(ValidationError) as e:
        _ = CloudEvent(attributes, None)
    assert "value is not a valid enumeration member; permitted: '0.3', '1.0'" in str(
        e.value
    )


def test_cloudevent_missing_minimal_required_fields():
    attributes = {"type": "t"}
    with pytest.raises(ValidationError) as e:
        _ = CloudEvent(attributes, None)
    assert "\nsource\n field required " in str(e.value)

    attributes = {"source": "s"}
    with pytest.raises(ValidationError) as e:
        _ = CloudEvent(attributes, None)
    assert "\ntype\n field required " in str(e.value)


def test_cloudevent_general_overrides():
    event = CloudEvent(
        {
            "source": "my-source",
            "type": "com.test.overrides",
            "subject": "my-subject",
        },
        None,
    )
    expected_attributes = [
        "time",
        "source",
        "id",
        "specversion",
        "type",
        "subject",
        "datacontenttype",
        "dataschema",
    ]

    assert len(event) == len(expected_attributes)
    for attribute in expected_attributes:
        assert attribute in event
        del event[attribute]
    assert len(event) == 0


def test_none_json_or_string():
    assert _json_or_string(None) is None


def test_get_operation_on_non_existing_attribute_must_not_raise_exception(
    dummy_event, non_exiting_attribute_name
):
    dummy_event.get(non_exiting_attribute_name)


def test_get_must_return_attribute_value_if_exists(dummy_event):
    assert dummy_event.get("source") == dummy_event["source"]


def test_get_operation_on_non_existing_attribute_must_return_none_by_default(
    dummy_event, non_exiting_attribute_name
):
    assert dummy_event.get(non_exiting_attribute_name) is None


def test_get_operation_on_non_existing_attribute_must_return_default_value_if_given(
    dummy_event, non_exiting_attribute_name
):
    dummy_value = "Hello World"
    assert dummy_event.get(non_exiting_attribute_name, dummy_value) == dummy_value


def test_get_operation_on_non_existing_attribute_should_not_copy_default_value(
    dummy_event, non_exiting_attribute_name
):
    dummy_value = object()
    assert dummy_event.get(non_exiting_attribute_name, dummy_value) is dummy_value

@pytest.mark.xfail()  # https://github.com/cloudevents/sdk-python/issues/185
def test_json_data_serialization_without_explicit_type():
    assert loads(
        CloudEvent(
            source=_DUMMY_SOURCE, type=_DUMMY_TYPE, data='{"hello": "world"}'
        ).json()
    )["data"] == {"hello": "world"}


@pytest.mark.xfail()  # https://github.com/cloudevents/sdk-python/issues/185
@pytest.mark.parametrize(
    "json_content_type",
    [
        "application/json",
        "application/ld+json",
        "application/x-my-custom-type+json",
        "text/html+json",
    ],
)
def test_json_data_serialization_with_explicit_json_content_type(
    dummy_attributes, json_content_type
):
    dummy_attributes["datacontenttype"] = json_content_type
    assert loads(CloudEvent(dummy_attributes, data='{"hello": "world"}',).json())[
        "data"
    ] == {"hello": "world"}

_NON_JSON_CONTENT_TYPES = [
    pytest.param("video/mp2t", id="MPEG transport stream"),
    pytest.param("text/plain", id="Text, (generally ASCII or ISO 8859-n)"),
    pytest.param("application/vnd.visio", id="Microsoft Visio"),
    pytest.param("audio/wav", id="Waveform Audio Format"),
    pytest.param("audio/webm", id="WEBM audio"),
    pytest.param("video/webm", id="WEBM video"),
    pytest.param("image/webp", id="WEBP image"),
    pytest.param("application/gzip", id="GZip Compressed Archive"),
    pytest.param("image/gif", id="Graphics Interchange Format (GIF)"),
    pytest.param("text/html", id="HyperText Markup Language (HTML)"),
    pytest.param("image/vnd.microsoft.icon", id="Icon format"),
    pytest.param("text/calendar", id="iCalendar format"),
    pytest.param("application/java-archive", id="Java Archive (JAR)"),
    pytest.param("image/jpeg", id="JPEG images"),
]


@pytest.mark.parametrize("datacontenttype", _NON_JSON_CONTENT_TYPES)
def test_json_data_serialization_with_explicit_non_json_content_type(
    dummy_attributes, datacontenttype
):
    dummy_attributes["datacontenttype"] = datacontenttype
    event = CloudEvent(
        dummy_attributes,
        data='{"hello": "world"}',
    ).json()
    assert loads(event)["data"] == '{"hello": "world"}'

@pytest.mark.parametrize("datacontenttype", _NON_JSON_CONTENT_TYPES)
def test_binary_data_serialization(dummy_attributes, datacontenttype):
    dummy_attributes["datacontenttype"] = datacontenttype
    event = CloudEvent(
        dummy_attributes,
        data=b"\x00\x00\x11Hello World",
    ).json()
    result_json = loads(event)
    assert result_json["data_base64"] == "AAARSGVsbG8gV29ybGQ="
    assert "data" not in result_json


def test_binary_data_deserialization():
    given = (
        b'{"source": "dummy:source", "id": "11775cb2-fd00-4487-a18b-30c3600eaa5f",'
        b' "type": "dummy.type", "specversion": "1.0", "time":'
        b' "2022-07-16T12:03:20.519216+00:00", "subject": null, "datacontenttype":'
        b' "application/octet-stream", "dataschema": null, "data_base64":'
        b' "AAARSGVsbG8gV29ybGQ="}'
    )
    expected = {
        "data": b"\x00\x00\x11Hello World",
        "datacontenttype": "application/octet-stream",
        "dataschema": None,
        "id": "11775cb2-fd00-4487-a18b-30c3600eaa5f",
        "source": "dummy:source",
        "specversion": SpecVersion.v1_0,
        "subject": None,
        "time": datetime.datetime(
            2022, 7, 16, 12, 3, 20, 519216, tzinfo=datetime.timezone.utc
        ),
        "type": "dummy.type",
    }
    assert CloudEvent.parse_raw(given).dict() == expected

def test_access_data_event_attribute_should_raise_key_error(dummy_event):
    with pytest.raises(KeyError):
        dummy_event["data"]


def test_delete_data_event_attribute_should_raise_key_error(dummy_event):
    with pytest.raises(KeyError):
        del dummy_event["data"]


def test_setting_data_attribute_should_not_affect_actual_data(dummy_event):
    my_data = object()
    dummy_event["data"] = my_data
    assert dummy_event.data != my_data


def test_event_length(dummy_event, dummy_attributes):
    assert len(dummy_event) == len(dummy_attributes)


def test_access_data_attribute_with_get_should_return_default(dummy_event):
    default = object()
    assert dummy_event.get("data", default) is default


def test_pydantic_repr_should_contain_attributes_and_data(dummy_event):
    assert "attributes" in repr(dummy_event)
    assert "data" in repr(dummy_event)


def test_data_must_never_exist_as_an_attribute_name(dummy_event):
    assert "data" not in dummy_event


def test_attributes_and_kwards_are_incompatible():
    with pytest.raises(IncompatibleArgumentsError):
        CloudEvent({"a": "b"}, other="hello world")
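
For orientation only (not part of the diff), a minimal sketch of the API the tests above exercise; the constructor, mapping access, and pydantic JSON round-trip mirror the tests, while the concrete attribute values are illustrative:

from cloudevents.pydantic import CloudEvent

# Build an event from an attributes dict plus data, as the fixtures above do.
event = CloudEvent(
    {"source": "dummy:source", "type": "tests.cloudevents.override"},
    data='{"name":"john"}',
)

# Attributes use the mapping interface; "data" is never exposed as an attribute.
assert event["source"] == "dummy:source"
assert "data" not in event

# Pydantic's own serialization round-trips the event as JSON.
restored = CloudEvent.parse_raw(event.json())
assert restored["source"] == event["source"]
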
@@ -13,11 +13,13 @@
# under the License.

import base64
import datetime
import json

import pytest

from cloudevents.http import CloudEvent, from_json, to_json
from cloudevents.pydantic import CloudEvent, from_dict, from_json, to_json
from cloudevents.sdk.event.attribute import SpecVersion

test_data = json.dumps({"data-key": "val"})
test_attributes = {
@@ -127,3 +129,28 @@ def test_json_can_talk_to_itself_base64(specversion):
    for key, val in test_attributes.items():
        assert event[key] == val
    assert event.data == data


def test_from_dict():
    given = {
        "data": b"\x00\x00\x11Hello World",
        "datacontenttype": "application/octet-stream",
        "dataschema": None,
        "id": "11775cb2-fd00-4487-a18b-30c3600eaa5f",
        "source": "dummy:source",
        "specversion": SpecVersion.v1_0,
        "subject": None,
        "time": datetime.datetime(
            2022, 7, 16, 12, 3, 20, 519216, tzinfo=datetime.timezone.utc
        ),
        "type": "dummy.type",
    }
    assert from_dict(given).dict() == given


@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
def test_pydantic_json_function_parameters_must_affect_output(specversion):
    event = CloudEvent(test_attributes, test_data)
    v1 = event.json(indent=2, sort_keys=True)
    v2 = event.json(indent=4, sort_keys=True)
    assert v1 != v2
@@ -0,0 +1,513 @@
# Copyright 2018-Present The CloudEvents Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import bz2
import io
import json

import pytest
from sanic import Sanic, response

import cloudevents.exceptions as cloud_exceptions
from cloudevents.pydantic import CloudEvent, from_http, to_binary, to_structured
from cloudevents.sdk import converters
from cloudevents.sdk.converters.binary import is_binary
from cloudevents.sdk.converters.structured import is_structured

invalid_test_headers = [
    {
        "ce-source": "<event-source>",
        "ce-type": "cloudevent.event.type",
        "ce-specversion": "1.0",
    },
    {
        "ce-id": "my-id",
        "ce-type": "cloudevent.event.type",
        "ce-specversion": "1.0",
    },
    {"ce-id": "my-id", "ce-source": "<event-source>", "ce-specversion": "1.0"},
    {
        "ce-id": "my-id",
        "ce-source": "<event-source>",
        "ce-type": "cloudevent.event.type",
    },
]

invalid_cloudevent_request_body = [
    {
        "source": "<event-source>",
        "type": "cloudevent.event.type",
        "specversion": "1.0",
    },
    {"id": "my-id", "type": "cloudevent.event.type", "specversion": "1.0"},
    {"id": "my-id", "source": "<event-source>", "specversion": "1.0"},
    {
        "id": "my-id",
        "source": "<event-source>",
        "type": "cloudevent.event.type",
    },
]

test_data = {"payload-content": "Hello World!"}

app = Sanic("test_pydantic_http_events")

@app.route("/event", ["POST"])
async def echo(request):
    decoder = None
    if "binary-payload" in request.headers:
        decoder = lambda x: x
    event = from_http(dict(request.headers), request.body, data_unmarshaller=decoder)
    data = (
        event.data
        if isinstance(event.data, (bytes, bytearray, memoryview))
        else json.dumps(event.data).encode()
    )
    return response.raw(data, headers={k: event[k] for k in event})


@pytest.mark.parametrize("body", invalid_cloudevent_request_body)
def test_missing_required_fields_structured(body):
    with pytest.raises(cloud_exceptions.MissingRequiredFields):

        _ = from_http(
            {"Content-Type": "application/cloudevents+json"}, json.dumps(body)
        )


@pytest.mark.parametrize("headers", invalid_test_headers)
def test_missing_required_fields_binary(headers):
    with pytest.raises(cloud_exceptions.MissingRequiredFields):
        _ = from_http(headers, json.dumps(test_data))


@pytest.mark.parametrize("headers", invalid_test_headers)
def test_missing_required_fields_empty_data_binary(headers):
    # Test for issue #115
    with pytest.raises(cloud_exceptions.MissingRequiredFields):
        _ = from_http(headers, None)


@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_emit_binary_event(specversion):
    headers = {
        "ce-id": "my-id",
        "ce-source": "<event-source>",
        "ce-type": "cloudevent.event.type",
        "ce-specversion": specversion,
        "Content-Type": "text/plain",
    }
    data = json.dumps(test_data)
    _, r = app.test_client.post("/event", headers=headers, data=data)

    # Convert byte array to dict
    # e.g. r.body = b'{"payload-content": "Hello World!"}'
    body = json.loads(r.body.decode("utf-8"))

    # Check response fields
    for key in test_data:
        assert body[key] == test_data[key], body
    for key in headers:
        if key != "Content-Type":
            attribute_key = key[3:]
            assert r.headers[attribute_key] == headers[key]
    assert r.status_code == 200

@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_emit_structured_event(specversion):
    headers = {"Content-Type": "application/cloudevents+json"}
    body = {
        "id": "my-id",
        "source": "<event-source>",
        "type": "cloudevent.event.type",
        "specversion": specversion,
        "data": test_data,
    }
    _, r = app.test_client.post("/event", headers=headers, data=json.dumps(body))

    # Convert byte array to dict
    # e.g. r.body = b'{"payload-content": "Hello World!"}'
    body = json.loads(r.body.decode("utf-8"))

    # Check response fields
    for key in test_data:
        assert body[key] == test_data[key]
    assert r.status_code == 200


@pytest.mark.parametrize(
    "converter", [converters.TypeBinary, converters.TypeStructured]
)
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_roundtrip_non_json_event(converter, specversion):
    input_data = io.BytesIO()
    for _ in range(100):
        for j in range(20):
            assert 1 == input_data.write(j.to_bytes(1, byteorder="big"))
    compressed_data = bz2.compress(input_data.getvalue())
    attrs = {"source": "test", "type": "t"}

    event = CloudEvent(attrs, compressed_data)

    if converter == converters.TypeStructured:
        headers, data = to_structured(event, data_marshaller=lambda x: x)
    elif converter == converters.TypeBinary:
        headers, data = to_binary(event, data_marshaller=lambda x: x)

    headers["binary-payload"] = "true"  # Decoding hint for server
    _, r = app.test_client.post("/event", headers=headers, data=data)

    assert r.status_code == 200
    for key in attrs:
        assert r.headers[key] == attrs[key]
    assert compressed_data == r.body, r.body

@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_missing_ce_prefix_binary_event(specversion):
    prefixed_headers = {}
    headers = {
        "ce-id": "my-id",
        "ce-source": "<event-source>",
        "ce-type": "cloudevent.event.type",
        "ce-specversion": specversion,
    }
    for key in headers:

        # breaking prefix e.g. e-id instead of ce-id
        prefixed_headers[key[1:]] = headers[key]

    with pytest.raises(cloud_exceptions.MissingRequiredFields):
        # CloudEvent constructor throws TypeError if missing required field
        # and NotImplementedError because structured calls aren't
        # implemented. In this instance one of the required keys should have
        # prefix e-id instead of ce-id therefore it should throw
        _ = from_http(prefixed_headers, json.dumps(test_data))


@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_valid_binary_events(specversion):
    # Test creating multiple cloud events
    events_queue = []
    headers = {}
    num_cloudevents = 30
    for i in range(num_cloudevents):
        headers = {
            "ce-id": f"id{i}",
            "ce-source": f"source{i}.com.test",
            "ce-type": "cloudevent.test.type",
            "ce-specversion": specversion,
        }
        data = {"payload": f"payload-{i}"}
        events_queue.append(from_http(headers, json.dumps(data)))

    for i, event in enumerate(events_queue):
        data = event.data
        assert event["id"] == f"id{i}"
        assert event["source"] == f"source{i}.com.test"
        assert event["specversion"] == specversion
        assert event.data["payload"] == f"payload-{i}"

@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_structured_to_request(specversion):
    attributes = {
        "specversion": specversion,
        "type": "word.found.name",
        "id": "96fb5f0b-001e-0108-6dfe-da6e2806f124",
        "source": "pytest",
    }
    data = {"message": "Hello World!"}

    event = CloudEvent(attributes, data)
    headers, body_bytes = to_structured(event)
    assert isinstance(body_bytes, bytes)
    body = json.loads(body_bytes)

    assert headers["content-type"] == "application/cloudevents+json"
    for key in attributes:
        assert body[key] == attributes[key]
    assert body["data"] == data, f"|{body_bytes}|| {body}"


@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_binary_to_request(specversion):
    attributes = {
        "specversion": specversion,
        "type": "word.found.name",
        "id": "96fb5f0b-001e-0108-6dfe-da6e2806f124",
        "source": "pytest",
    }
    data = {"message": "Hello World!"}
    event = CloudEvent(attributes, data)
    headers, body_bytes = to_binary(event)
    body = json.loads(body_bytes)

    for key in data:
        assert body[key] == data[key]
    for key in attributes:
        assert attributes[key] == headers["ce-" + key]


@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_empty_data_structured_event(specversion):
    # Testing if cloudevent breaks when no structured data field present
    attributes = {
        "specversion": specversion,
        "datacontenttype": "application/cloudevents+json",
        "type": "word.found.name",
        "id": "96fb5f0b-001e-0108-6dfe-da6e2806f124",
        "time": "2018-10-23T12:28:22.4579346Z",
        "source": "<source-url>",
    }

    event = from_http(
        {"content-type": "application/cloudevents+json"}, json.dumps(attributes)
    )
    assert event.data is None

    attributes["data"] = ""
    # Data of empty string will be marshalled into None
    event = from_http(
        {"content-type": "application/cloudevents+json"}, json.dumps(attributes)
    )
    assert event.data is None

@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_empty_data_binary_event(specversion):
    # Testing if cloudevent breaks when no structured data field present
    headers = {
        "Content-Type": "application/octet-stream",
        "ce-specversion": specversion,
        "ce-type": "word.found.name",
        "ce-id": "96fb5f0b-001e-0108-6dfe-da6e2806f124",
        "ce-time": "2018-10-23T12:28:22.4579346Z",
        "ce-source": "<source-url>",
    }
    event = from_http(headers, None)
    assert event.data is None

    data = ""
    # Data of empty string will be marshalled into None
    event = from_http(headers, data)
    assert event.data is None


@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_valid_structured_events(specversion):
    # Test creating multiple cloud events
    events_queue = []
    num_cloudevents = 30
    for i in range(num_cloudevents):
        event = {
            "id": f"id{i}",
            "source": f"source{i}.com.test",
            "type": "cloudevent.test.type",
            "specversion": specversion,
            "data": {"payload": f"payload-{i}"},
        }
        events_queue.append(
            from_http(
                {"content-type": "application/cloudevents+json"},
                json.dumps(event),
            )
        )

    for i, event in enumerate(events_queue):
        assert event["id"] == f"id{i}"
        assert event["source"] == f"source{i}.com.test"
        assert event["specversion"] == specversion
        assert event.data["payload"] == f"payload-{i}"


@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_structured_no_content_type(specversion):
    # Test creating multiple cloud events
    data = {
        "id": "id",
        "source": "source.com.test",
        "type": "cloudevent.test.type",
        "specversion": specversion,
        "data": test_data,
    }
    event = from_http({}, json.dumps(data))

    assert event["id"] == "id"
    assert event["source"] == "source.com.test"
    assert event["specversion"] == specversion
    for key, val in test_data.items():
        assert event.data[key] == val

def test_is_binary():
    headers = {
        "ce-id": "my-id",
        "ce-source": "<event-source>",
        "ce-type": "cloudevent.event.type",
        "ce-specversion": "1.0",
        "Content-Type": "text/plain",
    }
    assert is_binary(headers)

    headers = {
        "Content-Type": "application/cloudevents+json",
    }
    assert not is_binary(headers)

    headers = {}
    assert not is_binary(headers)


@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_cloudevent_repr(specversion):
    headers = {
        "Content-Type": "application/octet-stream",
        "ce-specversion": specversion,
        "ce-type": "word.found.name",
        "ce-id": "96fb5f0b-001e-0108-6dfe-da6e2806f124",
        "ce-time": "2018-10-23T12:28:22.4579346Z",
        "ce-source": "<source-url>",
    }
    event = from_http(headers, "")
    # Testing to make sure event is printable. I could run event.__repr__() but
    # we had issues in the past where event.__repr__() could run but
    # print(event) would fail.
    print(event)

@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
def test_none_data_cloudevent(specversion):
    event = CloudEvent(
        {
            "source": "<my-url>",
            "type": "issue.example",
            "specversion": specversion,
        }
    )
    to_binary(event)
    to_structured(event)


def test_wrong_specversion():
    headers = {"Content-Type": "application/cloudevents+json"}
    data = json.dumps(
        {
            "specversion": "0.2",
            "type": "word.found.name",
            "id": "96fb5f0b-001e-0108-6dfe-da6e2806f124",
            "source": "<my-source>",
        }
    )
    with pytest.raises(cloud_exceptions.InvalidRequiredFields) as e:
        from_http(headers, data)
    assert "Found invalid specversion 0.2" in str(e.value)


def test_invalid_data_format_structured_from_http():
    headers = {"Content-Type": "application/cloudevents+json"}
    data = 20
    with pytest.raises(cloud_exceptions.InvalidStructuredJSON) as e:
        from_http(headers, data)
    assert "Expected json of type (str, bytes, bytearray)" in str(e.value)


def test_wrong_specversion_to_request():
    event = CloudEvent({"source": "s", "type": "t"}, None)
    with pytest.raises(cloud_exceptions.InvalidRequiredFields) as e:
        event["specversion"] = "0.2"
        to_binary(event)
    assert "Unsupported specversion: 0.2" in str(e.value)

def test_is_structured():
    headers = {
        "Content-Type": "application/cloudevents+json",
    }
    assert is_structured(headers)

    headers = {
        "ce-id": "my-id",
        "ce-source": "<event-source>",
        "ce-type": "cloudevent.event.type",
        "ce-specversion": "1.0",
        "Content-Type": "text/plain",
    }
    assert not is_structured(headers)


def test_empty_json_structured():
    headers = {"Content-Type": "application/cloudevents+json"}
    data = ""
    with pytest.raises(cloud_exceptions.MissingRequiredFields) as e:
        from_http(headers, data)
    assert "Failed to read specversion from both headers and data" in str(e.value)


def test_uppercase_headers_with_none_data_binary():
    headers = {
        "Ce-Id": "my-id",
        "Ce-Source": "<event-source>",
        "Ce-Type": "cloudevent.event.type",
        "Ce-Specversion": "1.0",
    }
    event = from_http(headers, None)

    for key in headers:
        assert event[key.lower()[3:]] == headers[key]
    assert event.data is None

    _, new_data = to_binary(event)
    assert new_data is None

def test_generic_exception():
    headers = {"Content-Type": "application/cloudevents+json"}
    data = json.dumps(
        {
            "specversion": "1.0",
            "source": "s",
            "type": "t",
            "id": "1234-1234-1234",
            "data": "",
        }
    )
    with pytest.raises(cloud_exceptions.GenericException) as e:
        from_http({}, None)
    e.errisinstance(cloud_exceptions.MissingRequiredFields)

    with pytest.raises(cloud_exceptions.GenericException) as e:
        from_http({}, 123)
    e.errisinstance(cloud_exceptions.InvalidStructuredJSON)

    with pytest.raises(cloud_exceptions.GenericException) as e:
        from_http(headers, data, data_unmarshaller=lambda x: 1 / 0)
    e.errisinstance(cloud_exceptions.DataUnmarshallerError)

    with pytest.raises(cloud_exceptions.GenericException) as e:
        event = from_http(headers, data)
        to_binary(event, data_marshaller=lambda x: 1 / 0)
    e.errisinstance(cloud_exceptions.DataMarshallerError)


def test_non_dict_data_no_headers_bug():
    # Test for issue #116
    headers = {"Content-Type": "application/cloudevents+json"}
    data = "123"
    with pytest.raises(cloud_exceptions.MissingRequiredFields) as e:
        from_http(headers, data)
    assert "Failed to read specversion from both headers and data" in str(e.value)
    assert "The following deserialized data has no 'get' method" in str(e.value)

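As a quick orientation for the HTTP tests above, a minimal sketch (not part of the diff) of the binary round-trip they cover; the calls mirror the imports in this file, while the concrete attribute values are illustrative:

from cloudevents.pydantic import CloudEvent, from_http, to_binary

# Render an event as a binary HTTP message: ce-* headers plus a serialized body.
event = CloudEvent({"source": "pytest", "type": "word.found.name"}, {"message": "Hello World!"})
headers, body = to_binary(event)
assert headers["ce-source"] == "pytest"

# Parse it back from the wire representation, as the sanic echo handler does.
parsed = from_http(headers, body)
assert parsed["type"] == "word.found.name"
assert parsed.data == {"message": "Hello World!"}
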
@@ -13,3 +13,5 @@ aiohttp
Pillow
requests
flask
pydantic>=1.0.0,<1.9.0; python_version <= '3.6'
pydantic>=1.0.0,<2.0; python_version > '3.6'

setup.py
@@ -69,4 +69,10 @@ if __name__ == "__main__":
        packages=find_packages(exclude=["cloudevents.tests"]),
        version=pypi_config["version_target"],
        install_requires=["deprecation>=2.0,<3.0"],
        extras_require={
            "pydantic": [
                "pydantic>=1.0.0,<1.9.0; python_version <= '3.6'",
                "pydantic>=1.0.0,<2.0; python_version > '3.6'",
            ],
        },
    )